Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
373,600
def sync(self): log.info("syncing %r" % self.path) self.folder = self.jfs.get(self.path) self.synced = True
Update state of folder from Jottacloud server
373,601
def get_submissions_multiple_assignments_by_sis_id( self, is_section, sis_id, students=None, assignments=None, **params): if is_section: return self.get_submissions_multiple_assignments( is_section, self._sis_id(sis_id, ), students, assignments, **params) else: return self.get_submissions_multiple_assignments( is_section, self._sis_id(sis_id, ), students, assignments, **params)
List submissions for multiple assignments by course/section sis id and optionally student https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.for_students
373,602
def is_group(value): if type(value) == str: try: entry = grp.getgrnam(value) value = entry.gr_gid except KeyError: err_message = (.format(value)) raise validate.VdtValueError(err_message) return value elif type(value) == int: try: grp.getgrgid(value) except KeyError: err_message = (.format(value)) raise validate.VdtValueError(err_message) return value else: err_message = () raise validate.VdtTypeError(err_message)
Check whether groupname or gid as argument exists. if this function recieved groupname, convert gid and exec validation.
373,603
def configure(logstash_host=None, logstash_port=None, logdir=None): if not (logstash_host or logstash_port or logdir): raise ValueError() config.logstash.host = logstash_host or config.logstash.host config.logstash.port = logstash_port or config.logstash.port config.logdir = logdir or config.logdir create_logdir(config.logdir)
Configuration settings.
373,604
def blockreplace(path, marker_start=, marker_end=, content=, append_if_not_found=False, prepend_if_not_found=False, backup=, dry_run=False, show_changes=True, append_newline=False, insert_before_match=None, insert_after_match=None): s important to ensure that your marker includes the beginning of the text you wish to replace. content The content to be used between the two lines identified by marker_start and marker_stop. append_if_not_found : False If markers are not found and set to ``True`` then, the markers and content will be appended to the file. prepend_if_not_found : False If markers are not found and set to ``True`` then, the markers and content will be prepended to the file. insert_before_match If markers are not found, this parameter can be set to a regex which will insert the block before the first found occurrence in the file. .. versionadded:: Neon insert_after_match If markers are not found, this parameter can be set to a regex which will insert the block after the first found occurrence in the file. .. versionadded:: Neon backup The file extension to use for a backup of the file if any edit is made. Set to ``False`` to skip making a backup. dry_run : False If ``True``, do not make any edits to the file and simply return the changes that *would* be made. show_changes : True Controls how changes are presented. If ``True``, this function will return a unified diff of the changes made. If False, then it will return a boolean (``True`` if any changes were made, otherwise ``False``). append_newline : False Controls whether or not a newline is appended to the content block. If the value of this argument is ``True`` then a newline will be added to the content block. If it is ``False``, then a newline will *not* be added to the content block. If it is ``None`` then a newline will only be added to the content block if it does not already end in a newline. .. versionadded:: 2016.3.4 .. 
versionchanged:: 2017.7.5,2018.3.1 New behavior added when value is ``None``. .. versionchanged:: 2019.2.0 The default value of this argument will change to ``None`` to match the behavior of the :py:func:`file.blockreplace state <salt.states.file.blockreplace>` CLI Example: .. code-block:: bash salt file.blockreplace /etc/hosts \\ $ True Only one of append_if_not_found, prepend_if_not_found, insert_before_match, and insert_after_match is permittedFile not found: {0}files.get_encodingfiles.is_binaryCannot perform string replacements on a binary file: {0}RegEx expected in insert_before_match parameter.RegEx expected in insert_after_match parameter.\n\r\n\n\r\nre on the last line of the content block if append_newline: lines.append(content_line + linesep) lines.append(end_line) else: lines.append(content_line + end_line) else: lines.append(end_line) return lines try: fi_file = io.open(path, mode=, encoding=file_encoding, newline=) for line in fi_file: write_line_to_new_file = True if linesep is None: if line.endswith(): linesep = elif line.endswith(): linesep = else: in_block = True else: if in_block: block_found = True _add_content(linesep, lines=new_file, include_marker_start=False, end_line=line[marker_end_pos:]) orig_file.append(line) if write_line_to_new_file: new_file.append(line) except (IOError, OSError) as exc: raise CommandExecutionError( .format(path, exc) ) finally: if linesep is None: if show_changes: return diff return has_changes
.. versionadded:: 2014.1.0 Replace content of a text block in a file, delimited by line markers A block of content delimited by comments can help you manage several lines entries without worrying about old entries removal. .. note:: This function will store two copies of the file in-memory (the original version and the edited version) in order to detect changes and only edit the targeted file if necessary. path Filesystem path to the file to be edited marker_start The line content identifying a line as the start of the content block. Note that the whole line containing this marker will be considered, so whitespace or extra content before or after the marker is included in final output marker_end The line content identifying the end of the content block. As of versions 2017.7.5 and 2018.3.1, everything up to the text matching the marker will be replaced, so it's important to ensure that your marker includes the beginning of the text you wish to replace. content The content to be used between the two lines identified by marker_start and marker_stop. append_if_not_found : False If markers are not found and set to ``True`` then, the markers and content will be appended to the file. prepend_if_not_found : False If markers are not found and set to ``True`` then, the markers and content will be prepended to the file. insert_before_match If markers are not found, this parameter can be set to a regex which will insert the block before the first found occurrence in the file. .. versionadded:: Neon insert_after_match If markers are not found, this parameter can be set to a regex which will insert the block after the first found occurrence in the file. .. versionadded:: Neon backup The file extension to use for a backup of the file if any edit is made. Set to ``False`` to skip making a backup. dry_run : False If ``True``, do not make any edits to the file and simply return the changes that *would* be made. show_changes : True Controls how changes are presented. 
If ``True``, this function will return a unified diff of the changes made. If False, then it will return a boolean (``True`` if any changes were made, otherwise ``False``). append_newline : False Controls whether or not a newline is appended to the content block. If the value of this argument is ``True`` then a newline will be added to the content block. If it is ``False``, then a newline will *not* be added to the content block. If it is ``None`` then a newline will only be added to the content block if it does not already end in a newline. .. versionadded:: 2016.3.4 .. versionchanged:: 2017.7.5,2018.3.1 New behavior added when value is ``None``. .. versionchanged:: 2019.2.0 The default value of this argument will change to ``None`` to match the behavior of the :py:func:`file.blockreplace state <salt.states.file.blockreplace>` CLI Example: .. code-block:: bash salt '*' file.blockreplace /etc/hosts '#-- start managed zone foobar : DO NOT EDIT --' \\ '#-- end managed zone foobar --' $'10.0.1.1 foo.foobar\\n10.0.1.2 bar.foobar' True
373,605
def add_sync_methods(cls): for name in cls.__dict__.keys(): if name.endswith(): sync_name = name[:-6] if not hasattr(cls, sync_name): setattr(cls, sync_name, _make_sync_method(name)) return cls
Class decorator to add synchronous methods corresponding to async methods. This modifies the class in place, adding additional methods to it. If a synchronous method of a given name already exists it is not replaced. Args: cls: A class. Returns: The same class, modified in place.
373,606
def to_download(): first_day = parse(interval_first) last_day = parse(interval_last) format_change = parse() one_day = datetime.timedelta(1) cur_day = first_day url_list = [] while cur_day < last_day: fname = filename.format(day=cur_day.strftime("%Y%m%d")) if cur_day > format_change: cur_day += one_day url = base_url.format(year_month=cur_day.strftime("%Y.%m"), file_day=cur_day.strftime("%Y%m%d")) else: url = base_url_old.format(year_month=cur_day.strftime("%Y.%m"), file_day=cur_day.strftime("%Y%m%d")) cur_day += one_day url_list.append((fname, url)) return sorted(url_list, key=lambda tup: tup[0], reverse=True)
Build interval of urls to download. We always get the first file of the next day. Ex: 2013-01-01 => 2013-01-02.0000
373,607
def project_new_folder(object_id, input_params={}, always_retry=True, **kwargs): return DXHTTPRequest( % object_id, input_params, always_retry=always_retry, **kwargs)
Invokes the /project-xxxx/newFolder API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FnewFolder
373,608
def parent_frame_arguments(): arg_names, variable_arg_name, keyword_arg_name, local_vars = ( tf_inspect._inspect.getargvalues( tf_inspect._inspect.stack()[1][0])) local_vars.pop(variable_arg_name, {}) keyword_args = local_vars.pop(keyword_arg_name, {}) final_args = {} for arg_name in arg_names: final_args[arg_name] = local_vars.pop(arg_name) final_args.update(keyword_args) return final_args
Returns parent frame arguments. When called inside a function, returns a dictionary with the caller's function arguments. These are positional arguments and keyword arguments (**kwargs), while variable arguments (*varargs) are excluded. When called at global scope, this will return an empty dictionary, since there are no arguments. WARNING: If caller function argument names are overloaded before invoking this method, then values will reflect the overloaded value. For this reason, we recommend calling `parent_frame_arguments` at the beginning of the function.
373,609
def _generate_annotation_type_class(self, ns, annotation_type): self.emit(.format( class_name_for_annotation_type(annotation_type, ns))) with self.indent(): if annotation_type.has_documented_type_or_params(): self.emit() self.emit() self._generate_annotation_type_class_slots(annotation_type) self._generate_annotation_type_class_init(ns, annotation_type) self._generate_annotation_type_class_properties(ns, annotation_type) self.emit()
Defines a Python class that represents an annotation type in Stone.
373,610
def G(self, T): h = self.DHref s = self.Sref for Tmax in sorted([float(TT) for TT in self._Cp_records.keys()]): h = h + self._Cp_records[str(Tmax)].H(T) s = s + self._Cp_records[str(Tmax)].S(T) if T <= Tmax: return h - T * s + self.G_mag(T) Tmax = max([float(TT) for TT in self._Cp_records.keys()]) h = h + self.Cp(Tmax)*(T - Tmax) s = s + self.Cp(Tmax)*math.log(T / Tmax) return h - T * s + self.G_mag(T)
Calculate the heat capacity of the compound phase at the specified temperature. :param T: [K] temperature :returns: [J/mol] The Gibbs free energy of the compound phase.
373,611
def create(ctx, name, description, tags, private, init): try: tags = tags.split() if tags else None project_dict = dict(name=name, description=description, is_public=not private, tags=tags) project_config = ProjectConfig.from_dict(project_dict) except ValidationError: Printer.print_error() sys.exit(1) try: _project = PolyaxonClient().project.create_project(project_config) except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e: Printer.print_error(.format(name)) Printer.print_error(.format(e)) sys.exit(1) Printer.print_success("Project `{}` was created successfully.".format(_project.name)) if init: ctx.obj = {} ctx.invoke(init_project, project=name)
Create a new project. Uses [Caching](/references/polyaxon-cli/#caching) Example: \b ```bash $ polyaxon project create --name=cats-vs-dogs --description="Image Classification with DL" ```
373,612
def _write_source_data(self, sources): for i, source in enumerate(sources): self._write_source(source)
See src/jjk/measure3
373,613
def join_pretty_tensors(tensors, output, join_function=None, name=): if not tensors: raise ValueError() with output.g.name_scope(name): if join_function is None: last_dim = len(tensors[0].shape) - 1 return output.with_tensor(tf.concat(tensors, last_dim)) else: return output.with_tensor(join_function(tensors))
Joins the list of pretty_tensors and sets head of output_pretty_tensor. Args: tensors: A sequence of Layers or SequentialLayerBuilders to join. output: A pretty_tensor to set the head with the result. join_function: A function to join the tensors, defaults to concat on the last dimension. name: A name that is used for the name_scope Returns: The result of calling with_tensor on output Raises: ValueError: if pretty_tensors is None or empty.
373,614
def _add_error(self, *args, **kwargs): if kwargs.get(, None): error = ConfigError.create_from_yaml_node( *args, **kwargs ) elif self._value_node: error = ConfigError.create_from_yaml_node( node=self._value_node, *args, **kwargs ) else: error = ConfigError(*args, **kwargs) self._errors.append(error)
Convenience function to add an error to this object, with line numbers An error title or description should not accidentally leak self._value, for privacy/redaction purposes. :rtype: None
373,615
def set_weather_from_metar( metar: typing.Union[Metar.Metar, str], in_file: typing.Union[str, Path], out_file: typing.Union[str, Path] = None ) -> typing.Tuple[typing.Union[str, None], typing.Union[str, None]]: error, metar = custom_metar.CustomMetar.get_metar(metar) if error: return error, None if metar: LOGGER.debug(, metar.code) in_file = elib.path.ensure_file(in_file) if out_file is None: out_file = in_file else: out_file = elib.path.ensure_file(out_file, must_exist=False) LOGGER.debug(, in_file, out_file) try: LOGGER.debug() _mission_weather = mission_weather.MissionWeather(metar) with Miz(str(in_file)) as miz: _mission_weather.apply_to_miz(miz) miz.zip(str(out_file)) return None, f except ValueError: error = f \ f \ f return error, None
Applies the weather from a METAR object to a MIZ file Args: metar: metar object in_file: path to MIZ file out_file: path to output MIZ file (will default to in_file) Returns: tuple of error, success
373,616
def SensorsTriggersNotificationsDelete(self, sensor_id, trigger_id, notification_id): if self.__SenseApiCall__(.format(sensor_id, trigger_id, notification_id), ): return True else: self.__error__ = "api call unsuccessful" return False
Disconnect a notification from a sensor-trigger combination. @param sensor_id (int) - Sensor id if the sensor-trigger combination. @param trigger_id (int) - Trigger id of the sensor-trigger combination. @param notification_id (int) - Notification id of the notification to disconnect. @param (bool) - Boolean indicating whether SensorstriggersNotificationsDelete was successful.
373,617
def write_users(dburl): data = { : , : , : , : r r, } for p in PERMISSIONS: data[p] = db = redis.StrictRedis.from_url(dburl) db.hmset(, data) db.hset(, , ) if not db.exists(): db.incr() print("Username: admin") print("Password: admin") return 0
Write users to the DB.
373,618
def to_dict(self): data = {: self.aid, : self.number, : self.element} for coord in {, , }: if getattr(self, coord) is not None: data[coord] = getattr(self, coord) if self.charge is not 0: data[] = self.charge return data
Return a dictionary containing Atom data.
373,619
def _disappeared(self, fd, path, **params): log = self._getparam(, self._discard, **params) log.debug("Path %r removed or renamed, handling removal", path) self._close(fd) if self._mode == WF_POLLING and fd in self._poll_stat: del self._poll_stat[fd] if self._mode == WF_INOTIFYX and path in self._inx_inode: del self._inx_inode[path] del self.fds_open[fd] del self.paths_open[path] if self.paths[path]: try: if self._add_file(path, **params): log.debug("Path %r immediately reappeared, pending transition skipped", path) return except Exception as e: log.debug("Path %r reappearance check failed -- %s", path, e) log.debug("Path %r marked as pending", path) self.paths_pending[path] = True else: del self.paths[path] raise Exception("Path %r has been removed or renamed" % path)
Called when an open path is no longer acessible. This will either move the path to pending (if the 'missing' param is set for the file), or fire an exception.
373,620
def set_timestamp_to_current(self): self.timestamp = pytz.UTC.localize(datetime.datetime.utcnow())
Set timestamp to current time utc :rtype: None
373,621
def seat_button_count(self): if self.type != EventType.TABLET_TOOL_BUTTON: raise AttributeError(_wrong_prop.format(self.type)) return self._libinput.libinput_event_tablet_tool_get_seat_button_count( self._handle)
The total number of buttons pressed on all devices on the associated seat after the the event was triggered. For events that are not of type :attr:`~libinput.constant.EventType.TABLET_TOOL_BUTTON`, this property raises :exc:`AttributeError`. Returns: int: The seat wide pressed button count for the key of this event.
373,622
def replace_pipe(self, name, component): if name not in self.pipe_names: raise ValueError(Errors.E001.format(name=name, opts=self.pipe_names)) self.pipeline[self.pipe_names.index(name)] = (name, component)
Replace a component in the pipeline. name (unicode): Name of the component to replace. component (callable): Pipeline component. DOCS: https://spacy.io/api/language#replace_pipe
373,623
def sendNotification(snmpDispatcher, authData, transportTarget, notifyType, *varBinds, **options): sysUpTime = v2c.apiTrapPDU.sysUpTime snmpTrapOID = v2c.apiTrapPDU.snmpTrapOID def _ensureVarBinds(varBinds): if not varBinds or varBinds[0][0] != sysUpTime: varBinds.insert(0, (v2c.ObjectIdentifier(sysUpTime), v2c.TimeTicks(0))) for idx, varBind in enumerate(varBinds[2:]): if varBind[0] == snmpTrapOID: del varBinds[idx + 2] if varBinds[1][0] == snmpTrapOID: varBinds[1] = varBind else: varBinds.insert(1, varBind) break if varBinds[1][0] != snmpTrapOID: raise error.PySnmpError( ) return varBinds def _cbFun(snmpDispatcher, stateHandle, errorIndication, rspPdu, _cbCtx): if future.cancelled(): return errorStatus = v2c.apiTrapPDU.getErrorStatus(rspPdu) errorIndex = v2c.apiTrapPDU.getErrorIndex(rspPdu) varBinds = v2c.apiTrapPDU.getVarBinds(rspPdu) try: varBindsUnmade = VB_PROCESSOR.unmakeVarBinds(snmpDispatcher.cache, varBinds, lookupMib) except Exception as e: future.set_exception(e) else: future.set_result( (errorIndication, errorStatus, errorIndex, varBindsUnmade) ) lookupMib = options.get() if not lookupMib and any(isinstance(x, (NotificationType, ObjectType)) for x in varBinds): lookupMib = True if lookupMib: varBinds = VB_PROCESSOR.makeVarBinds(snmpDispatcher.cache, varBinds) if notifyType == : reqPdu = v2c.TrapPDU() else: reqPdu = v2c.InformRequestPDU() v2c.apiTrapPDU.setDefaults(reqPdu) v2c.apiTrapPDU.setVarBinds(reqPdu, varBinds) varBinds = v2c.apiTrapPDU.getVarBinds(reqPdu) v2c.apiTrapPDU.setVarBinds(reqPdu, _ensureVarBinds(varBinds)) if authData.mpModel == 0: reqPdu = rfc2576.v2ToV1(reqPdu) future = asyncio.Future() snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun) if notifyType == : def __trapFun(future): if future.cancelled(): return future.set_result((None, 0, 0, [])) loop = asyncio.get_event_loop() loop.call_soon(__trapFun, future) return future
Creates a generator to send SNMP notification. When iterator gets advanced by :py:mod:`asyncio` main loop, SNMP TRAP or INFORM notification is send (:RFC:`1905#section-4.2.6`). The iterator yields :py:class:`asyncio.Future` which gets done whenever response arrives or error occurs. Parameters ---------- snmpDispatcher: :py:class:`~pysnmp.hlapi.v1arch.asyncore.SnmpDispatcher` Class instance representing asynio-based asynchronous event loop and associated state information. authData: :py:class:`~pysnmp.hlapi.v1arch.CommunityData` Class instance representing SNMPv1/v2c credentials. transportTarget: :py:class:`~pysnmp.hlapi.v1arch.asyncio.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.v1arch.asyncio.Udp6TransportTarget` Class instance representing transport type along with SNMP peer address. notifyType : str Indicates type of notification to be sent. Recognized literal values are *trap* or *inform*. \*varBinds: :class:`tuple` of OID-value pairs or :py:class:`~pysnmp.smi.rfc1902.ObjectType` or :py:class:`~pysnmp.smi.rfc1902.NotificationType` One or more objects representing MIB variables to place into SNMP notification. It could be tuples of OID-values or :py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances of :py:class:`~pysnmp.smi.rfc1902.NotificationType` objects. Besides user variable-bindings, SNMP Notification PDU requires at least two variable-bindings to be present: 0. SNMPv2-MIB::sysUpTime.0 = <agent uptime> 1. SNMPv2-SMI::snmpTrapOID.0 = <notification ID> When sending SNMPv1 TRAP, more variable-bindings could be present: 2. SNMP-COMMUNITY-MIB::snmpTrapAddress.0 = <agent-IP> 3. SNMP-COMMUNITY-MIB::snmpTrapCommunity.0 = <snmp-community-name> 4. SNMP-COMMUNITY-MIB::snmpTrapEnterprise.0 = <enterprise-OID> If user does not supply some or any of the above variable-bindings or if they are at the wrong positions, the system will add/reorder the missing ones automatically. 
On top of that, some notification types imply including some additional variable-bindings providing additional details on the event being reported. Therefore it is generally easier to use :py:class:`~pysnmp.smi.rfc1902.NotificationType` object which will help adding relevant variable-bindings. Other Parameters ---------------- \*\*options : Request options: * `lookupMib` - load MIB and resolve response MIB variables at the cost of slightly reduced performance. Default is `False`, unless :py:class:`~pysnmp.smi.rfc1902.ObjectType` or :py:class:`~pysnmp.smi.rfc1902.NotificationType` is present among `varBinds` in which case `lookupMib` gets automatically enabled. Yields ------ errorIndication: str True value indicates SNMP engine error. errorStatus: str True value indicates SNMP PDU error. errorIndex: int Non-zero value refers to `varBinds[errorIndex-1]` varBinds: tuple A sequence of OID-value pairs in form of base SNMP types (if `lookupMib` is `False`) or :py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances (if `lookupMib` is `True`) representing MIB variables returned in SNMP response. Raises ------ PySnmpError Or its derivative indicating that an error occurred while performing SNMP operation. Examples -------- >>> import asyncio >>> from pysnmp.hlapi.asyncio import * >>> >>> @asyncio.coroutine ... def run(): ... errorIndication, errorStatus, errorIndex, varBinds = yield from sendNotification( ... SnmpDispatcher(), ... CommunityData('public'), ... UdpTransportTarget(('demo.snmplabs.com', 162)), ... 'trap', ... NotificationType(ObjectIdentity('IF-MIB', 'linkDown'))) ... print(errorIndication, errorStatus, errorIndex, varBinds) ... >>> asyncio.get_event_loop().run_until_complete(run()) (None, 0, 0, []) >>>
373,624
def swo_set_emu_buffer_size(self, buf_size): buf = ctypes.c_uint32(buf_size) res = self._dll.JLINKARM_SWO_Control(enums.JLinkSWOCommands.SET_BUFFERSIZE_EMU, ctypes.byref(buf)) if res < 0: raise errors.JLinkException(res) return None
Sets the size of the buffer used by the J-Link to collect SWO data. Args: self (JLink): the ``JLink`` instance buf_size (int): the new size of the emulator buffer Returns: ``None`` Raises: JLinkException: on error
373,625
async def fetch(self, *args, timeout=None): r data = await self.__bind_execute(args, 0, timeout) return data
r"""Execute the statement and return a list of :class:`Record` objects. :param str query: Query text :param args: Query arguments :param float timeout: Optional timeout value in seconds. :return: A list of :class:`Record` instances.
373,626
def _get_crawled_urls(self, handle, request): try: content = six.text_type(handle.open(request).read(), "utf-8", errors="replace") soup = BeautifulSoup(content, "html.parser") tags = soup() for tag in tqdm(tags): href = tag.get("href") if href is not None: url = urllib.parse.urljoin(self.url, escape(href)) if url not in self: self.urls.append(url) except urllib.request.HTTPError as error: if error.code == 404: logger.warning("ERROR: %s -> %s for %s" % (error, error.url, self.url)) else: logger.warning("ERROR: %s for %s" % (error, self.url)) except urllib.request.URLError as error: logger.warning("ERROR: %s for %s" % (error, self.url)) raise urllib.request.URLError("URL entered is Incorrect")
Main method where the crawler html content is parsed with beautiful soup and out of the DOM, we get the urls
373,627
def get_account_db_class(cls) -> Type[BaseAccountDB]: if cls.account_db_class is None: raise AttributeError("No account_db_class set for {0}".format(cls.__name__)) return cls.account_db_class
Return the :class:`~eth.db.account.BaseAccountDB` class that the state class uses.
373,628
def encodeValue(value): if isinstance(value, (list, tuple)): return [common.AttributeValue(string_value=str(v)) for v in value] else: return [common.AttributeValue(string_value=str(value))]
TODO
373,629
def breadth_first(problem, graph_search=False, viewer=None): return _search(problem, FifoList(), graph_search=graph_search, viewer=viewer)
Breadth first search. If graph_search=True, will avoid exploring repeated states. Requires: SearchProblem.actions, SearchProblem.result, and SearchProblem.is_goal.
373,630
def convert_contentbody_to_new_type(self, content_data, old_representation, new_representation, callback=None): assert {old_representation, new_representation} < {"storage", "editor", "view", "export_view"} request_data = {"value": str(content_data), "representation": old_representation} return self._service_post_request("rest/api/contentbody/convert/{to}".format(to=new_representation), data=json.dumps(request_data), headers={"Content-Type": "application/json"}, callback=callback)
Converts between content body representations. Not all representations can be converted to/from other formats. Supported conversions: Source Representation | Destination Representation Supported -------------------------------------------------------------- "storage" | "view","export_view","editor" "editor" | "storage" "view" | None "export_view" | None :param content_data (string): The content data to transform. :param old_representation (string): The representation to convert from. :param new_representation (string): The representation to convert to. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the contentbody/convert/{to} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
373,631
def type_to_string(f, map_types): if f.type in [1]: return "double" elif f.type in [2]: return "float" elif f.type in [3]: return "long" elif f.type in [4]: return "uint64" elif f.type in [5]: return "integer" elif f.type in [6]: return "fixed64" elif f.type in [7]: return "fixed32" elif f.type in [8]: return "boolean" elif f.type in [9]: return "string" elif f.type in [11, 14]: ref_name = f.ref_type if ref_name in map_types: ref_fields = map_types[ref_name] return { "type": "map", "key": " %s "% type_to_string(ref_fields["key"], map_types), "value": " %s "% type_to_string(ref_fields["value"], map_types) } else: kind = ":protobuf:message:`%s`" % simplify_name(f.ref_type) if f.label == 3: return "list of " + kind else: return kind elif f.type in [12]: return "bytes" elif f.type in [13]: return "uint32" elif f.type in [15]: return "sfixed32" elif f.type in [16]: return "sfixed64" elif f.type in [17]: return "sint32" elif f.type in [18]: return "sint64" else: raise Exception, f.type
Convert type info to pretty names, based on numbers from from FieldDescriptorProto https://developers.google.com/protocol-buffers/docs/reference/cpp/google.protobuf.descriptor.pb
373,632
def filter_labels(sent: Sequence[str], labels: Set[str] = None) -> List[str]: if labels: return [tok for tok in sent if tok in labels] return list(sent)
Returns only the tokens present in the sentence that are in labels.
373,633
def update_function_code(FunctionName, ZipFile=None, S3Bucket=None, S3Key=None, S3ObjectVersion=None, Publish=False, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: if ZipFile: if S3Bucket or S3Key or S3ObjectVersion: raise SaltInvocationError( ) r = conn.update_function_code(FunctionName=FunctionName, ZipFile=_filedata(ZipFile), Publish=Publish) else: if not S3Bucket or not S3Key: raise SaltInvocationError( ) args = { : S3Bucket, : S3Key, } if S3ObjectVersion: args[] = S3ObjectVersion r = conn.update_function_code(FunctionName=FunctionName, Publish=Publish, **args) if r: keys = (, , , , , , , , , , , , ) return {: True, : dict([(k, r.get(k)) for k in keys])} else: log.warning() return {: False} except ClientError as e: return {: False, : __utils__[](e)}
Upload the given code to the named lambda function. Returns {updated: true} if the function was updated and returns {updated: False} if the function was not updated. CLI Example: .. code-block:: bash salt myminion boto_lamba.update_function_code my_function ZipFile=function.zip
373,634
def _validate_min(self, proposal): min = proposal[] if min > self.max: raise TraitError() if min > self.value: self.value = min return min
Enforce min <= value <= max
373,635
def wait_for_logs_matching(self, matcher, timeout=10, encoding=, **logs_kwargs): wait_for_logs_matching( self.inner(), matcher, timeout=timeout, encoding=encoding, **logs_kwargs)
Wait for logs matching the given matcher.
373,636
def _to_chimera(M, N, L, q): "Converts a qubit's linear index to chimera coordinates." return (q // N // L // 2, (q // L // 2) % N, (q // L) % 2, q % L)
Converts a qubit's linear index to chimera coordinates.
373,637
def from_credentials(cls: Type[SigningKeyType], salt: Union[str, bytes], password: Union[str, bytes], scrypt_params: Optional[ScryptParams] = None) -> SigningKeyType: if scrypt_params is None: scrypt_params = ScryptParams() salt = ensure_bytes(salt) password = ensure_bytes(password) seed = scrypt(password, salt, scrypt_params.N, scrypt_params.r, scrypt_params.p, scrypt_params.seed_length) return cls(seed)
Create a SigningKey object from credentials :param salt: Secret salt passphrase credential :param password: Secret password credential :param scrypt_params: ScryptParams instance
373,638
def _write_entries(self, stream, entries, converter, properties=None): def iter_entries(): for c in entries: entry = converter(c) if entry is None: continue if properties is not None: entry = OrderedDict( (key, value) for key, value in iteritems(entry) if key == or key in properties) yield entry self._dump(stream, list(iter_entries()))
Write iterable of entries as YAML object to stream. Args: stream: File-like object. entries: Iterable of entries. converter: Conversion function from entry to YAML object. properties: Set of compartment properties to output (or None to output all).
373,639
# Move the server-side cursor forward one row at a time: in relative mode
# advance `value` rows from the current position; in absolute mode advance
# until row `value`. Backwards movement raises NotSupportedError in both
# modes; an unknown mode raises ProgrammingError.
# NOTE(review): the mode string literals (presumably 'relative' as the
# default and 'absolute' -- TODO confirm) were stripped by extraction, so
# the code is kept byte-identical.
async def scroll(self, value, mode=): self._check_executed() if mode == : if value < 0: raise NotSupportedError("Backwards scrolling not supported " "by this cursor") for _ in range(value): await self._read_next() self._rownumber += value elif mode == : if value < self._rownumber: raise NotSupportedError( "Backwards scrolling not supported by this cursor") end = value - self._rownumber for _ in range(end): await self._read_next() self._rownumber = value else: raise ProgrammingError("unknown scroll mode %s" % mode)
Scroll the cursor in the result set to a new position according to mode . Same as :meth:`Cursor.scroll`, but move cursor on server side one by one row. If you want to move 20 rows forward scroll will make 20 queries to move cursor. Currently only forward scrolling is supported. :param int value: move cursor to next position according to mode. :param str mode: scroll mode, possible modes: `relative` and `absolute`
373,640
def var_added(self, v):
    """Register a variable that appeared while the bot was running.

    Called e.g. from livecoding; registers *v*, resizes the window to fit
    the widget list, and re-shows everything.

    :param v: the variable that was added
    """
    self.add_variable(v)
    # Fixed 400 px width, 35 px of height per tracked widget.
    widget_count = len(self.widgets)
    self.window.set_size_request(400, 35 * widget_count)
    self.window.show_all()
var was added in the bot while it ran, possibly by livecoding :param v: :return:
373,641
def mmGetCellActivityPlot(self, title="", showReset=False, resetShading=0.25,
                          activityType="activeCells"):
    """Return a plot of the recorded cell activity.

    @param title        (string) optional title for the figure
    @param showReset    (bool)   if True, shade the first set of cell
                                 activities after a reset
    @param resetShading (float)  shading intensity when showReset is True
                                 (0.0 = white .. 1.0 = black)
    @param activityType (string) which recorded trace to plot,
                                 e.g. "activeCells"

    @return (Plot) the cell-trace plot
    """
    # Deep-copy so converting each entry to cell indices does not mutate
    # the recorded trace data.
    cellTrace = copy.deepcopy(self._mmTraces[activityType].data)
    # Fixed: `xrange` is Python 2 only; use a comprehension over the copy.
    cellTrace = [self.getCellIndices(step) for step in cellTrace]
    return self.mmGetCellTracePlot(cellTrace, self.numberOfCells(),
                                   activityType, title, showReset,
                                   resetShading)
Returns plot of the cell activity. @param title (string) an optional title for the figure @param showReset (bool) if true, the first set of cell activities after a reset will have a gray background @param resetShading (float) if showReset is true, this float specifies the intensity of the reset background with 0.0 being white and 1.0 being black @param activityType (string) The type of cell activity to display. Valid types include "activeCells" @return (Plot) plot
373,642
def get_matching_indexes(self, possible_hash, possible_range):
    """Return the indexes that could be queried with the given key fields.

    Indexes whose hash key is in *possible_hash* are candidates; if any
    candidate's range key is also in *possible_range*, only those
    hash-and-range matches are returned, otherwise all hash-only matches
    are.

    Parameters
    ----------
    possible_hash : set
        The names of fields that could be used as the hash key
    possible_range : set
        The names of fields that could be used as the range key
    """
    hash_matches = [
        index for index in self.iter_query_indexes()
        if index.hash_key in possible_hash
    ]
    full_matches = [
        index for index in hash_matches
        if index.range_key in possible_range
    ]
    # Prefer indexes matching both keys; fall back to hash-only matches.
    return full_matches or hash_matches
Get all indexes that could be queried on using a set of keys. If any indexes match both hash AND range keys, indexes that only match the hash key will be excluded from the result. Parameters ---------- possible_hash : set The names of fields that could be used as the hash key possible_range : set The names of fields that could be used as the range key
373,643
# Apply the configured transformation expression to `latest_value`, feeding
# in the seconds elapsed since the previous update; on a bad expression the
# transform is logged and cleared.
# NOTE(review): visible defects that cannot be fixed here because the row is
# extraction-garbled (the value assigned in the trailing `self.transform =`
# was stripped): `self.transfrom` is a typo for `self.transform`, the log
# format has one `%s` placeholder for two arguments, and `logger.warn` is
# the deprecated alias of `logger.warning`. Code kept byte-identical.
def do_transform(self): if not self.transform: return try: self.latest_value = utils.Transform( expr=self.transform, value=self.latest_value, timedelta=self.time_between_updates().total_seconds()).result() except (TypeError, ValueError): logger.warn("Invalid transformation for metric %s", self.transfrom, self.pk) self.transform =
Apply the transformation (if it exists) to the latest_value
373,644
def links(xmrs): links = [] prelinks = [] _eps = xmrs._eps _hcons = xmrs._hcons _vars = xmrs._vars lsh = xmrs.labelset_heads lblheads = {v: lsh(v) for v, vd in _vars.items() if in vd[]} top = xmrs.top if top is not None: prelinks.append((0, top, None, top, _vars[top])) for nid, ep in _eps.items(): for role, val in ep[3].items(): if role == IVARG_ROLE or val not in _vars: continue prelinks.append((nid, ep[2], role, val, _vars[val])) for src, srclbl, role, val, vd in prelinks: if IVARG_ROLE in vd[]: tgtnids = [n for n in vd[][IVARG_ROLE] if not _eps[n].is_quantifier()] if len(tgtnids) == 0: continue tgt = tgtnids[0] tgtlbl = _eps[tgt][2] post = EQ_POST if srclbl == tgtlbl else NEQ_POST elif val in _hcons: lbl = _hcons[val][2] if lbl not in lblheads or len(lblheads[lbl]) == 0: continue tgt = lblheads[lbl][0] post = H_POST elif in vd[]: if val not in lblheads or len(lblheads[val]) == 0: continue tgt = lblheads[val][0] post = HEQ_POST else: continue links.append(Link(src, tgt, role, post)) for lbl, heads in lblheads.items(): if len(heads) > 1: first = heads[0] for other in heads[1:]: links.append(Link(other, first, BARE_EQ_ROLE, EQ_POST)) def _int(x): try: return int(x) except ValueError: return 0 return sorted( links, key=lambda link: (_int(link.start), _int(link.end), link.rargname) )
Return the list of Links for the *xmrs*.
373,645
def brightness_prob(self, clip=True): thresh = 0.11 bp = np.minimum(thresh, self.nir) / thresh if clip: bp[bp > 1] = 1 bp[bp < 0] = 0 return bp
The brightest water may have Band 5 reflectance as high as 0.11. Equation 10 (Zhu and Woodcock, 2012) Parameters ---------- nir: ndarray clip: boolean Output ------ ndarray: brightness probability, constrained 0..1
373,646
def put( self, path_or_tuple, folder_id=, overwrite=None, downsize=None, bits_api_fallback=True ): api_overwrite = self._translate_api_flag(overwrite, , []) api_downsize = self._translate_api_flag(downsize, ) name, src = self._process_upload_source(path_or_tuple) if not isinstance(bits_api_fallback, (int, float, long)): bits_api_fallback = bool(bits_api_fallback) if bits_api_fallback is not False: if bits_api_fallback is True: bits_api_fallback = self.api_put_max_bytes src.seek(0, os.SEEK_END) if src.tell() >= bits_api_fallback: if bits_api_fallback > 0: log.info( , *((float(v) / 2**20) for v in [src.tell(), bits_api_fallback]) ) if overwrite is not None and api_overwrite != : raise NoAPISupportError( .format(overwrite) ) if downsize is not None: log.info( , downsize ) file_id = self.put_bits(path_or_tuple, folder_id=folder_id) return self.info(file_id) return self( self._api_url_join(folder_id, , name), dict(overwrite=api_overwrite, downsize_photo_uploads=api_downsize), data=src, method=, auth_header=True )
Upload a file (object), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. First argument can be either path to a local file or tuple of "(name, file)", where "file" can be either a file-like object or just a string of bytes. overwrite option can be set to False to allow two identically-named files or "ChooseNewName" to let OneDrive derive some similar unique name. Behavior of this option mimics underlying API. downsize is a true/false API flag, similar to overwrite. bits_api_fallback can be either True/False or an integer (number of bytes), and determines whether method will fall back to using BITS API (as implemented by "put_bits" method) for large files. Default "True" (bool) value will use non-BITS file size limit (api_put_max_bytes, ~100 MiB) as a fallback threshold, passing False will force using single-request uploads.
373,647
def export_image3d(input, output, size=(800, 600), pcb_rotate=(0, 0, 0), timeout=20, showgui=False): showgui input = norm_path(input) output = norm_path(output) ext = os.path.splitext(input)[1] if ext not in []: raise ValueError( + str(input)) commands = [] eagle3d = Path(__file__).dirname() / ulp = (eagle3d / ).abspath() commands += [ + ulp] commands += [] def render(dir, f): templ = pov = Path(f.replace(, )) if pcb_rotate != (0, 0, 0): s = pov.bytes() s = s.replace(templ % (, 0), templ % (, pcb_rotate[0])) s = s.replace(templ % (, 0), templ % (, pcb_rotate[1])) s = s.replace(templ % (, 0), templ % (, pcb_rotate[2])) pov.write_bytes(s) fpng = Path(f.replace(, )) cmd = [] cmd += ["povray"] cmd += ["-d"] cmd += ["-a"] cmd += [ + str(size[0])] cmd += [ + str(size[1])] cmd += [ + fpng] cmd += [ + eagle3d] cmd += [pov] p = Proc(cmd).call() if not fpng.exists(): raise EagleError( % p) fpng.copy(output) command_eagle(input=input, timeout=timeout, commands=commands, showgui=showgui, callback=render)
Exporting eagle .brd file into 3D image file using Eagle3D and povray. GUI is not displayed if ``pyvirtualdisplay`` is installed. If export is blocked somehow (e.g. popup window is displayed) then after timeout operation is canceled with exception. Problem can be investigated by setting 'showgui' flag. :param input: eagle .brd file name :param output: image file name (.png) :param timeout: operation is canceled after this timeout (sec) :param showgui: eagle GUI is displayed :param size: tuple(width, size), image size :rtype: None
373,648
def stdout_avail(self):
    """Drain the interpreter's stdout queue and write any pending text."""
    pending = self.interpreter.stdout_write.empty_queue()
    if pending:
        self.write(pending)
Data is available in stdout, let's empty the queue and write it!
373,649
def removeOutliers(points, radius): isactor = False if isinstance(points, vtk.vtkActor): isactor = True poly = points.GetMapper().GetInput() else: src = vtk.vtkPointSource() src.SetNumberOfPoints(len(points)) src.Update() vpts = src.GetOutput().GetPoints() for i, p in enumerate(points): vpts.SetPoint(i, p) poly = src.GetOutput() removal = vtk.vtkRadiusOutlierRemoval() removal.SetInputData(poly) removal.SetRadius(radius) removal.SetNumberOfNeighbors(5) removal.GenerateOutliersOff() removal.Update() rpoly = removal.GetOutput() print(" removal.GetNumberOfPointsRemoved(), , poly.GetNumberOfPoints()) outpts = [] for i in range(rpoly.GetNumberOfPoints()): outpts.append(list(rpoly.GetPoint(i))) outpts = np.array(outpts) if not isactor: return outpts actor = vs.Points(outpts) return actor
Remove outliers from a cloud of points within the specified `radius` search. .. hint:: |clustering| |clustering.py|_
373,650
def get(self, key, env=None):
    """Return the config setting for *key* in *env*.

    Defaults to the current environment.  Unset keys fall back to the
    environment variables mapped in ``env_dct``; unknown keys or
    environments yield None.  The special key "identity_class" is
    resolved by importing the class named in the "identity_type"
    environment variable.
    """
    if env is None:
        env = self.environment
    try:
        found = self._settings[env][key]
    except KeyError:
        found = None
    if found is not None:
        return found
    # Fall back to environment variables for unset keys.
    if key == "identity_class":
        ityp_var = self.env_dct.get("identity_type")
        ityp = os.environ.get(ityp_var)
        if ityp:
            return _import_identity(ityp)
        return found
    mapped_var = self.env_dct.get(key)
    if mapped_var is not None:
        found = os.environ.get(mapped_var)
    return found
Returns the config setting for the specified environment. If no environment is specified, the value for the current environment is returned. If an unknown key or environment is passed, None is returned.
373,651
async def strings(self, request: Optional[]=None) \ -> List[Tuple[Text, ...]]: if request: locale = await request.get_locale() else: locale = None return self.db.get(self.key, locale)
For the given request, find the list of strings of that intent. If the intent does not exist, it will raise a KeyError.
373,652
def collect(self): for root, dirname, files in walk(self.migration_home): for file_name in file_filter(files, "*.py"): file_name = file_name.replace(, ) file = None try: if file_name == : continue file, pathname, description = find_module( file_name, [root]) load_module(file_name, file, pathname, description) finally: if file is not None: file.close()
Walks self.migration_home and load all potential migration modules
373,653
# result(N) -> return the stored result of job N; an unknown job id is
# reported through the module-level `error` helper instead of raising.
# NOTE(review): the format string in `error( % num)` was stripped by
# extraction -- code kept byte-identical.
def result(self,num): try: return self.all[num].result except KeyError: error( % num)
result(N) -> return the result of job N.
373,654
def reporter(self): logging.info() header = .format(.join(self.headers)) data = str() for sample in self.metadata: data += GenObject.returnattr(sample, ) data += GenObject.returnattr(sample.run, ) data += GenObject.returnattr(sample.general, ) data += GenObject.returnattr(sample.run, ) data += GenObject.returnattr(sample.run, ) data += GenObject.returnattr(sample.confindr, ) n50 = GenObject.returnattr(sample.quality_features_polished, , number=True) if n50 != : data += n50 else: data += data += GenObject.returnattr(sample.quality_features_polished, , number=True) data += GenObject.returnattr(sample.quality_features_polished, , number=True) data += GenObject.returnattr(sample.mapping, , number=True) data += GenObject.returnattr(sample.mapping, , number=True) data += GenObject.returnattr(sample.mapping, , number=True) data += GenObject.returnattr(sample.mapping, , number=True) data += GenObject.returnattr(sample.quality_features_polished, , number=True) data += GenObject.returnattr(sample.mash, ) data += GenObject.returnattr(sample.mash, ) data += GenObject.returnattr(sample.sixteens_full, ) try: if sample.rmlst.matches == 53: rmlst_seq_type = GenObject.returnattr(sample.rmlst, ) rmlst_seq_type = rmlst_seq_type if rmlst_seq_type != else data += rmlst_seq_type else: data += except AttributeError: data += try: if sample.mlst.matches == 7: data += GenObject.returnattr(sample.mlst, ) else: data += except AttributeError: data += try: gene_set = {gene.split()[0] for gene in sample.mlst.results} for gene in sorted(gene_set): allele_list = list() for allele in sample.mlst.results: if gene in allele: allele_list.append(allele) if len(allele_list) > 1: data += .format(.join(allele_list)) else: data += allele_list[0] + if len(gene_set) < 7: data += (7 - len(gene_set)) * except AttributeError: data += data += GenObject.returnattr(sample.coregenome, ) try: if .join(sample.serosippr.o_set) == : otype = else: otype = .format(oset=.join(sample.serosippr.o_set), 
opid=sample.serosippr.best_o_pid) if .join(sample.serosippr.h_set) == : htype = else: htype = .format(hset=.join(sample.serosippr.h_set), hpid=sample.serosippr.best_h_pid) serotype = .format(otype=otype, htype=htype) data += serotype if serotype != else except AttributeError: data += data += GenObject.returnattr(sample.sistr, ).rstrip() data += GenObject.returnattr(sample.sistr, ) data += GenObject.returnattr(sample.sistr, ) data += GenObject.returnattr(sample.sistr, ).rstrip() data += GenObject.returnattr(sample.sistr, ).rstrip() data += GenObject.returnattr(sample.sistr, ) try: if sample.genesippr.report_output: data += .join(sample.genesippr.report_output) + else: data += except AttributeError: data += data += GenObject.returnattr(sample.legacy_vtyper, ) if sample.resfinder_assembled.pipelineresults: for resistance, resistance_set in sorted(sample.resfinder_assembled.pipelineresults.items()): data += .format(res=resistance.replace(, ), r_set=.join(sorted(list(resistance_set)))) data += data += else: data += data += metadatareport.write(header) metadatareport.write(cleandata)
Creates the metadata report by pulling specific attributes from the metadata objects
373,655
def cfg_to_dot(self, filename): with open(filename, , encoding=) as f: f.write() for node in self.nodes: f.write(.format(node.node_id, str(node))) for son in node.sons: f.write(.format(node.node_id, son.node_id)) f.write("}\n")
Export the function to a dot file Args: filename (str)
373,656
# Join query term groups with the boolean AND operator; each group is first
# OR-joined via self._or_join. A single group is returned as its OR-join.
# NOTE(review): the join separator literal (presumably ' AND ' -- TODO
# confirm) was stripped by extraction -- code kept byte-identical.
def _and_join(self, terms): if len(terms) > 1: return .join([self._or_join(t) for t in terms]) else: return self._or_join(terms[0])
Joins terms using AND operator. Args: terms (list): terms to join Examples: self._and_join(['term1']) -> 'term1' self._and_join(['term1', 'term2']) -> 'term1 AND term2' self._and_join(['term1', 'term2', 'term3']) -> 'term1 AND term2 AND term3' Returns: str
373,657
def log_source(self, loglevel=): ll = loglevel.upper() if ll == : return else: if "run_keyword_and_ignore_error" not in [check_error_ignored[3] for check_error_ignored in inspect.stack()]: source = self._current_application().page_source self._log(source, ll) return source else: return
Logs and returns the entire html source of the current page or frame. The `loglevel` argument defines the used log level. Valid log levels are `WARN`, `INFO` (default), `DEBUG`, `TRACE` and `NONE` (no logging).
373,658
def _population_load_script(work_bams, names, chrom, pairmode, items): bed_file = _get_regional_bed_file(items[0]) if bed_file: return _population_prep_targeted.format(bam_file_str=",".join(work_bams), names_str=",".join(names), chrom=chrom, num_cores=0, pairmode=pairmode, bed_file=bed_file) else: return _population_prep.format(bam_file_str=",".join(work_bams), names_str=",".join(names), chrom=chrom, num_cores=0, pairmode=pairmode)
Prepare BAMs for assessing CNVs in a population.
373,659
# Chain two futures so completion propagates: the result (or exception) of
# `source` is copied into `dest`, and cancelling `dest` cancels `source`.
# Works with both asyncio.Future and concurrent.futures.Future (state is
# copied via the module-level _copy_future_state /
# _set_concurrent_future_state helpers); transfers between different event
# loops are marshalled with call_soon_threadsafe.
# NOTE(review): the TypeError messages were stripped by extraction, and the
# callback wiring is order-sensitive -- code kept byte-identical.
def _chain_future(source, dest): if not isinstance(source, (asyncio.Future, concurrent.futures.Future)): raise TypeError() if not isinstance(dest, (asyncio.Future, concurrent.futures.Future)): raise TypeError() source_loop = source._loop if isinstance(source, asyncio.Future) else None dest_loop = dest._loop if isinstance(dest, asyncio.Future) else None def _set_state(future, other): if isinstance(future, asyncio.Future): _copy_future_state(other, future) else: _set_concurrent_future_state(future, other) def _call_check_cancel(destination): if destination.cancelled(): if source_loop is None or source_loop is dest_loop: source.cancel() else: source_loop.call_soon_threadsafe(source.cancel) def _call_set_state(source): if dest_loop is None or dest_loop is source_loop: _set_state(dest, source) else: dest_loop.call_soon_threadsafe(_set_state, dest, source) dest.add_done_callback(_call_check_cancel) source.add_done_callback(_call_set_state)
Chain two futures so that when one completes, so does the other. The result (or exception) of source will be copied to destination. If destination is cancelled, source gets cancelled too. Compatible with both asyncio.Future and concurrent.futures.Future.
373,660
def shutdown(self):
    """Begin a graceful shutdown of this connection.

    If a request cycle is still in flight, flag it not to keep the
    connection alive so it closes once the response completes;
    otherwise close the transport immediately.
    """
    cycle = self.cycle
    if cycle is not None and not cycle.response_complete:
        cycle.keep_alive = False
    else:
        self.transport.close()
Called by the server to commence a graceful shutdown.
373,661
def vq_gating(x, num_experts, k, bneck, hparams=None, name="vq_gating"): with tf.variable_scope(name, reuse=tf.AUTO_REUSE): if hparams.use_scales: scales = tf.get_variable( "scales", [num_experts], tf.float32, initializer=tf.ones_initializer()) scales = tf.nn.softmax(scales) hparams.scales = scales input_size = x.get_shape().as_list()[-1] batch_size = common_layers.shape_list(x)[0] if k > 1: x = tf.layers.dense(x, input_size * k) x = tf.reshape(x, [batch_size * k, input_size]) inputs = tf.expand_dims(x, axis=1) inputs = tf.expand_dims(inputs, axis=1) hparams.z_size = int(math.log(num_experts, 2)) hparams.hidden_size = input_size hparams.top_k = k d = bneck.discrete_bottleneck(inputs) centroids = None exp_discrete = d["discrete"] embed_lookup = d["embed"] extra_loss = d["loss"] if hparams.residual_centroids: centroids = embed_lookup(exp_discrete) top_k_indices = tf.squeeze(exp_discrete, axis=1) tf.summary.histogram("discrete_counts", top_k_indices) if k > 1: top_k_indices = tf.reshape(top_k_indices, [batch_size, k]) top_k_gates = tf.ones([batch_size, k]) gates = _rowwise_unsorted_segment_sum(top_k_gates, top_k_indices, num_experts) count_per_expert = tf.reduce_sum(gates, axis=0) if hparams.use_scales: scale_loss = tf.reduce_mean(tf.to_float(count_per_expert) * scales) extra_loss += scale_loss if common_layers.should_generate_summaries(): tf.summary.histogram("vq_loss", extra_loss) tf.summary.historgram("scale_loss", scale_loss) return gates, extra_loss, centroids
VQ gating. Args: x: input Tensor with shape [batch_size, input_size] num_experts: an integer k: an integer - number of experts per example bneck: a bottleneck object hparams: optional hparams name: an optional string Returns: gates: a Tensor with shape [batch_size, num_experts] load: a Tensor with shape [num_experts]
373,662
def _set_pg(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("pgid",pg.pg, yang_name="pg", rest_name="pg", parent=self, is_container=, user_ordered=False, path_helper=self._path_helper, yang_keys=, extensions={u: {u: u, u: None, u: u, u: u}}), is_container=, yang_name="pg", rest_name="pg", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None, u: u, u: u}}, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "list", : , }) self.__pg = t if hasattr(self, ): self._set()
Setter method for pg, mapped from YANG variable /rbridge_id/ag/pg (list) If this variable is read-only (config: false) in the source YANG file, then _set_pg is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_pg() directly.
373,663
def topology_mdtraj(traj): import mdtraj as md top = {} top[] = [a.element.symbol for a in traj.topology.atoms] top[] = [a.name for a in traj.topology.atoms] top[] = [(a.index, b.index) for a, b in traj.topology.bonds] top[] = md.compute_dssp(traj[0])[0] top[] = [r.name for r in traj.topology.residues ] top[] = [ [a.index for a in r.atoms] for r in traj.topology.residues ] return top
Generate topology spec for the MolecularViewer from mdtraj. :param mdtraj.Trajectory traj: the trajectory :return: A chemview-compatible dictionary corresponding to the topology defined in mdtraj.
373,664
def bulk_history_create(self, objs, batch_size=None):
    """Bulk-create a "+" (created) historical record for each instance in *objs*."""
    def _historical(instance):
        # Copy every concrete model field that is not excluded from history.
        field_values = {
            field.attname: getattr(instance, field.attname)
            for field in instance._meta.fields
            if field.name not in self.model._history_excluded_fields
        }
        return self.model(
            history_date=getattr(instance, "_history_date", now()),
            history_user=getattr(instance, "_history_user", None),
            history_change_reason=getattr(instance, "changeReason", ""),
            history_type="+",
            **field_values
        )

    records = [_historical(instance) for instance in objs]
    return self.model.objects.bulk_create(records, batch_size=batch_size)
Bulk create the history for the objects specified by objs
373,665
# List the file paths (relative to the repo root) changed between
# `compare_commit_id` and `actual_commit_id`, by running a git command in
# the destination directory and splitting its output into non-blank lines.
# NOTE(review): the literal git arguments passed to shell_tools.output_of
# (presumably 'git diff --name-only' plus a separator -- TODO confirm) were
# stripped by extraction -- code kept byte-identical.
def get_changed_files(self) -> List[str]: out = shell_tools.output_of( , , , self.compare_commit_id, self.actual_commit_id, , cwd=self.destination_directory) return [e for e in out.split() if e.strip()]
Get the files changed on one git branch vs another. Returns: List[str]: File paths of changed files, relative to the git repo root.
373,666
def stage_http_response1(self, conn_id, version, status, reason, headers):
    """Stash the HTTP response metadata (version, status, reason, headers).

    ``conn_id`` is accepted for interface parity but is unused here; it is
    consumed by the logging side.
    """
    for suffix, value in (
        ("version", version),
        ("status", status),
        ("reason", reason),
        ("headers", headers),
    ):
        setattr(self, "_http_response_" + suffix, value)
Set response http info including headers, status, etc. conn_id unused here. Used in log
373,667
# Return the numerator formatted with locale-aware grouping and its
# auto-scaled unit suffix.
# NOTE(review): this row is extraction-garbled -- the formatter literals
# chosen by `formatter = if ... else` and the final template in
# `.format(numerator, unit)` were stripped; the `if not self.undefined:
# return None` guard also reads inverted. Code kept byte-identical pending
# the original source.
def str_numerator(self): if not self.undefined: return None unit_numerator, unit = self._unit_class(self.numerator).auto formatter = if unit_numerator == self.numerator else numerator = locale.format(formatter, unit_numerator, grouping=True) return .format(numerator, unit)
Returns the numerator with formatting.
373,668
def mount(name=None, **kwargs): -a*** flags = [] opts = {} if kwargs.get(, False): flags.append() if kwargs.get(, False): opts[] = kwargs.get() if name in [None, ]: if name == : salt.utils.versions.warn_until( , -a\ None\) flags.append() name = None res = __salt__[]( __utils__[]( command=, flags=flags, opts=opts, target=name, ), python_shell=False, ) return __utils__[](res, )
Mounts ZFS file systems name : string name of the filesystem, having this set to None will mount all filesystems. (this is the default) overlay : boolean perform an overlay mount. options : string optional comma-separated list of mount options to use temporarily for the duration of the mount. .. versionadded:: 2016.3.0 .. versionchanged:: 2018.3.1 .. warning:: Passing '-a' as name is deprecated and will be removed in Sodium. CLI Example: .. code-block:: bash salt '*' zfs.mount salt '*' zfs.mount myzpool/mydataset salt '*' zfs.mount myzpool/mydataset options=ro
373,669
def _act(self, utterance: str) -> list: if self.stateful: utterance = [[utterance], [self.key]] else: utterance = [[utterance]] agent_response: list = self.agent(*utterance) return agent_response
Infers DeepPavlov agent with raw user input extracted from Alexa request. Args: utterance: Raw user input extracted from Alexa request. Returns: response: DeepPavlov agent response.
373,670
def find_by_tag(self, tag, params=None, **options):
    """Return the compact task records for all tasks with the given tag.

    Parameters
    ----------
    tag : Id
        The tag in which to search for tasks.
    params : dict, optional
        Parameters for the request.
    """
    # Fixed: avoid a mutable default argument; omitting `params` still
    # behaves exactly as the old `params={}` default.
    if params is None:
        params = {}
    path = "/tags/%s/tasks" % (tag)
    return self.client.get_collection(path, params, **options)
Returns the compact task records for all tasks with the given tag. Parameters ---------- tag : {Id} The tag in which to search for tasks. [params] : {Object} Parameters for the request
373,671
# Query the URLVoid API for reputation info on `name` (must be a FQDN),
# parse the XML response, and return the texts of the matched detection
# entries -- or None when the name is invalid or nothing matched.
# NOTE(review): the URL template in `.format(key=..., name=...)` and the
# XPath literals in `tree.find()` were stripped by extraction -- code kept
# byte-identical.
def urlvoid_check(name, api_key): if not is_fqdn(name): return None url = .format(key=api_key, name=name) response = requests.get(url) tree = ET.fromstring(response.text) if tree.find(): return [e.text for e in tree.find()] else: return None
Checks URLVoid.com for info on a domain
373,672
# Cerberus rule handler for the 'readonly' flag. Before normalization a
# readonly field is flagged with READONLY_FIELD; after normalization, if
# that error was already recorded for this field, the remaining validation
# rules are dropped so the readonly failure is only reported once. The
# phase checks via `_is_normalized` are order-sensitive, so the code is
# left untouched.
def _validate_readonly(self, readonly, field, value): if readonly: if not self._is_normalized: self._error(field, errors.READONLY_FIELD) has_error = errors.READONLY_FIELD in \ self.document_error_tree.fetch_errors_from( self.document_path + (field,)) if self._is_normalized and has_error: self._drop_remaining_rules()
{'type': 'boolean'}
373,673
# Remove all references to a Subject ID: drop each session id mapped to
# `sub`, then delete the subject entry itself.
# NOTE(review): the first argument of the get/remove/delete calls (a
# table/kind name literal) was stripped by extraction -- code kept
# byte-identical.
def remove_sub(self, sub): for _sid in self.get(, sub): self.remove(, _sid, sub) self.delete(, sub)
Remove all references to a specific Subject ID :param sub: A Subject ID
373,674
def convert_pronouns( mrf_lines ): _P_s mrf to syntactic analyzer i = 0 while ( i < len(mrf_lines) ): line = mrf_lines[i] if in line: for [pattern, replacement] in _pronConversions: lastline = line line = re.sub(pattern, replacement, line) if lastline != line: mrf_lines[i] = line break i += 1 return mrf_lines
Converts pronouns (analysis lines with '_P_') from Filosoft's mrf to syntactic analyzer's mrf format; Uses the set of predefined pronoun conversion rules from _pronConversions; _pronConversions should be a list of lists, where each outer list stands for a single conversion rule and inner list contains a pair of elements: first is the regexp pattern and the second is the replacement, used in re.sub( pattern, replacement, line ) Returns the input mrf list, with the lines converted from one format to another;
373,675
def K_value(P=None, Psat=None, phi_l=None, phi_g=None, gamma=None, Poynting=1): rs law, or an equation of state model, or an activity coefficient model, or a combined equation of state-activity model. The calculation procedure will use the most advanced approach with the provided inputs: * If `P`, `Psat`, `phi_l`, `phi_g`, and `gamma` are provided, use the combined approach. * If `P`, `Psat`, and `gamma` are provided, use the modified Raoults law. Definitions: .. math:: K_i=\frac{y_i}{x_i} Raoults law): .. math:: K_i = \frac{\gamma_i P_{i}^{sat}}{P} Equation of state only: .. math:: K_i = \frac{\phi_i^l}{\phi_i^v} = \frac{f_i^l}{f_i^v} Combined approach (liquid reference fugacity coefficient is normally calculated the saturation pressure for it as a pure species; vapor fugacity coefficient calculated normally): .. math:: K_i = \frac{\gamma_i P_i^{sat} \phi_i^{l,ref}}{\phi_i^v P} Combined approach, with Poynting Correction Factor (liquid molar volume in the integral is for i as a pure species only): .. math:: K_i = \frac{\gamma_i P_i^{sat} \phi_i^{l, ref} \exp\left[\frac{ \int_{P_i^{sat}}^P V_i^l dP}{RT}\right]}{\phi_i^v P} Parameters ---------- P : float System pressure, optional Psat : float Vapor pressure of species i, [Pa] phi_l : float Fugacity coefficient of species i in the liquid phase, either at the system conditions (EOS-only case) or at the saturation pressure of species i as a pure species (reference condition for the combined approach), optional [-] phi_g : float Fugacity coefficient of species i in the vapor phase at the system conditions, optional [-] gamma : float Activity coefficient of species i in the liquid phase, optional [-] Poynting : float Poynting correction factor, optional [-] Returns ------- K : float Equilibrium K value of component i, calculated with an approach depending on the provided inputs [-] Notes ----- The Poynting correction factor is normally simplified as follows, due to a liquids law: >>> K_value(101325, 3000.) 
0.029607698001480384 Modified Raoult try: if gamma: if phi_l: return gamma*Psat*phi_l*Poynting/(phi_g*P) return gamma*Psat*Poynting/P elif phi_l: return phi_l/phi_g return Psat/P except TypeError: raise Exception()
r'''Calculates the equilibrium K-value assuming Raoult's law, or an equation of state model, or an activity coefficient model, or a combined equation of state-activity model. The calculation procedure will use the most advanced approach with the provided inputs: * If `P`, `Psat`, `phi_l`, `phi_g`, and `gamma` are provided, use the combined approach. * If `P`, `Psat`, and `gamma` are provided, use the modified Raoult's law. * If `phi_l` and `phi_g` are provided, use the EOS only method. * If `P` and `Psat` are provided, use Raoult's law. Definitions: .. math:: K_i=\frac{y_i}{x_i} Raoult's law: .. math:: K_i = \frac{P_{i}^{sat}}{P} Activity coefficient, no EOS (modified Raoult's law): .. math:: K_i = \frac{\gamma_i P_{i}^{sat}}{P} Equation of state only: .. math:: K_i = \frac{\phi_i^l}{\phi_i^v} = \frac{f_i^l}{f_i^v} Combined approach (liquid reference fugacity coefficient is normally calculated the saturation pressure for it as a pure species; vapor fugacity coefficient calculated normally): .. math:: K_i = \frac{\gamma_i P_i^{sat} \phi_i^{l,ref}}{\phi_i^v P} Combined approach, with Poynting Correction Factor (liquid molar volume in the integral is for i as a pure species only): .. 
math:: K_i = \frac{\gamma_i P_i^{sat} \phi_i^{l, ref} \exp\left[\frac{ \int_{P_i^{sat}}^P V_i^l dP}{RT}\right]}{\phi_i^v P} Parameters ---------- P : float System pressure, optional Psat : float Vapor pressure of species i, [Pa] phi_l : float Fugacity coefficient of species i in the liquid phase, either at the system conditions (EOS-only case) or at the saturation pressure of species i as a pure species (reference condition for the combined approach), optional [-] phi_g : float Fugacity coefficient of species i in the vapor phase at the system conditions, optional [-] gamma : float Activity coefficient of species i in the liquid phase, optional [-] Poynting : float Poynting correction factor, optional [-] Returns ------- K : float Equilibrium K value of component i, calculated with an approach depending on the provided inputs [-] Notes ----- The Poynting correction factor is normally simplified as follows, due to a liquid's low pressure dependency: .. math:: K_i = \frac{\gamma_i P_i^{sat} \phi_i^{l, ref} \exp\left[\frac{V_l (P-P_i^{sat})}{RT}\right]}{\phi_i^v P} Examples -------- Raoult's law: >>> K_value(101325, 3000.) 0.029607698001480384 Modified Raoult's law: >>> K_value(P=101325, Psat=3000, gamma=0.9) 0.026646928201332347 EOS-only approach: >>> K_value(phi_l=1.6356, phi_g=0.88427) 1.8496613025433408 Gamma-phi combined approach: >>> K_value(P=1E6, Psat=1938800, phi_l=1.4356, phi_g=0.88427, gamma=0.92) 2.8958055544121137 Gamma-phi combined approach with a Poynting factor: >>> K_value(P=1E6, Psat=1938800, phi_l=1.4356, phi_g=0.88427, gamma=0.92, ... Poynting=0.999) 2.8929097488577016 References ---------- .. [1] Gmehling, Jurgen, Barbel Kolbe, Michael Kleiber, and Jurgen Rarey. Chemical Thermodynamics for Process Simulation. 1st edition. Weinheim: Wiley-VCH, 2012. .. [2] Skogestad, Sigurd. Chemical and Energy Process Engineering. 1st edition. Boca Raton, FL: CRC Press, 2008.
373,676
def run(self, i_str, start_count=0, start_chunk_time=None): try: if not os.path.exists(self.tmp_dir_path): os.makedirs(self.tmp_dir_path) if start_chunk_time is None: start_chunk_time = time.time() i_chunk = self.reader(i_str) t_path = None len_clean_visible = 0 sources = set() next_idx = 0 input_item_count = 0 for si in i_chunk: next_idx += 1 if gevent: gevent.sleep(0) if next_idx <= start_count: continue if next_idx % self.rate_log_interval == 0: elapsed = time.time() - start_chunk_time if elapsed > 0: rate = float(next_idx) / elapsed logger.info( , next_idx - start_count, elapsed, rate, i_str) if not self.t_chunk: self.output_chunk_max_clean_visible_bytes, len_clean_visible) len_clean_visible = 0 self._process_output_chunk( start_count, next_idx, sources, i_str, t_path) start_count = next_idx input_item_count += 1 if (((self.input_item_limit is not None) and (input_item_count > self.input_item_limit))): break if self.t_chunk is not None: self._process_output_chunk( start_count, next_idx, sources, i_str, t_path) return next_idx finally: if self.t_chunk is not None: self.t_chunk.close() for transform in self.batch_transforms: transform.shutdown() if self.cleanup_tmp_files: rmtree(self.tmp_dir_path)
Run the pipeline. This runs all of the steps described in the pipeline constructor, reading from some input and writing to some output. :param str i_str: name of the input file, or other reader-specific description of where to get input :param int start_count: index of the first stream item :param int start_chunk_time: timestamp for the first stream item
373,677
def next(self) -> Future:
    """Return a `.Future` that will yield the next available result.

    The returned future is a fresh object and is never the same object
    as any of the inputs.
    """
    self._running_future = Future()
    # If a result is already waiting, resolve the new future right away.
    if self._finished:
        done = self._finished.popleft()
        self._return_result(done)
    return self._running_future
Returns a `.Future` that will yield the next available result. Note that this `.Future` will not be the same object as any of the inputs.
373,678
def all_after_notification(self, model, prop_name, info):
    """Log every recursive change notification after the change occurs.

    Registered as an observer of ``StateMachineModel.state_machine`` for
    any observed StateMachineModel.

    :param model: StateMachineModel representing the changed state machine
    :param prop_name: name of the notifying property (always
        'state_machine')
    :param info: dictionary holding recursive notification information
        (models, property and method names)
    """
    overview = NotificationOverview(info)
    self.logger.debug(overview)
The method logs all changes that notified recursively trough the hierarchies of the states after the change occurs in the rafcon.core object. The method register as observer of observable StateMachineModel.state_machine of any observed StateMachineModel. :param model: StateMachineModel that is represents the state_machine which has been changed :param prop_name: Name of property that notifies -> here always 'state_machine' :param info: Dictionary that hold recursive notification information like models, property and method names :return:
373,679
def alien_filter(name, location, size, unsize):
    """Drop entries located in the "slackbuilds" folder.

    The four arguments are parallel lists; they are filtered in lockstep
    and returned as a list of four lists in the same order.
    """
    kept = [entry for entry in zip(name, location, size, unsize)
            if entry[1] != "slackbuilds"]
    if not kept:
        return [[], [], [], []]
    # Transpose the surviving rows back into four parallel columns.
    fname, flocation, fsize, funsize = (list(col) for col in zip(*kept))
    return [fname, flocation, fsize, funsize]
Filter out packages that are included in the slackbuilds folder.
373,680
def _check_triple(self, triple):
    """Compare one RDF triple against the ontology; return an error or None.

    Validation proceeds in order: ignored predicates, namespace check,
    class validity, class membership, duplication.  The first failing
    check short-circuits and its error object is returned.

    NOTE(review): the first log message's format placeholder appears to
    have been stripped (the ``.format`` call has no ``{}`` in the
    literal) — confirm against upstream.
    """
    subj, pred, obj = triple
    # Some predicates are deliberately exempt from validation.
    if self._should_ignore_predicate(pred):
        log.info("Ignoring triple with predicate "
                 .format(self._field_name_from_uri(pred)))
        return
    classes = []
    log.warning("Possible member %s found" % pred)
    pred = self._expand_qname(pred)
    # Only predicates from the allowed namespaces get validated.
    if self._namespace_from_uri(pred) not in self.allowed_namespaces:
        log.info("Member %s does not use an allowed namespace", pred)
        return
    instanceof = self._is_instance((subj, pred, obj))
    if type(instanceof) == rt.URIRef:
        instanceof = self._expand_qname(instanceof)
    # Lazily parse the ontology on first use.
    if hasattr(self.schema_def, "attributes_by_class") and \
        not self.schema_def.attributes_by_class:
        log.info("Parsed ontology not found. Parsing...")
        self.schema_def.parse_ontology()
    class_invalid = self._validate_class(instanceof)
    if class_invalid:
        log.warning("Invalid class %s" % instanceof)
        return class_invalid
    # Membership is checked against the class and all its superclasses.
    classes = self._superclasses_for_subject(self.graph, instanceof)
    classes.append(instanceof)
    member_invalid = self._validate_member(pred, classes, instanceof)
    if member_invalid:
        log.warning("Invalid member of class")
        return member_invalid
    dupe_invalid = self._validate_duplication((subj, pred), instanceof)
    if dupe_invalid:
        log.warning("Duplication found")
        return dupe_invalid
    # Record the (subject, predicate) pair so duplicates can be detected.
    self.checked_attributes.append((subj, pred))
    log.warning("successfully validated triple, no errors")
    return
compare triple to ontology, return error or None
373,681
def _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):
    """Get the part of speech name for *pos_code*.

    NOTE(review): the string literals in this function had been stripped
    from the source (leaving invalid syntax); they are restored here from
    the upstream PyNLPIR ``pos_map`` module and should be confirmed
    against it.

    :param pos_code: part-of-speech code to look up, e.g. ``'nsf'``.
    :param names: which name(s) to return — ``'parent'`` (most generic),
        ``'child'`` (most specific), or ``'all'`` (the whole hierarchy).
    :param english: return English names if True, Chinese otherwise.
    :param pos_map: the part-of-speech map to search.
    :returns: a name string, a tuple of names for ``'all'``, or ``None``
        if *pos_code* is not recognized.
    :raises ValueError: if *names* is not one of the accepted options.
    """
    pos_code = pos_code.lower()
    if names not in ('parent', 'child', 'all'):
        raise ValueError("names must be one of 'parent', 'child', or "
                         "'all'; not '{0}'".format(names))
    logger.debug("Getting {0} POS name for '{1}' formatted as '{2}'.".format(
        'English' if english else 'Chinese', pos_code, names))
    # Find the shortest prefix of pos_code present in the map.
    for i in range(1, len(pos_code) + 1):
        try:
            pos_key = pos_code[0:i]
            pos_entry = pos_map[pos_key]
            break
        except KeyError:
            if i == len(pos_code):
                logger.warning("part of speech not recognized: '{0}'".format(
                    pos_code))
                return None
    pos = (pos_entry[1 if english else 0], )
    if names == 'parent':
        logger.debug("Part of speech name found: '{0}'".format(pos[0]))
        return pos[0]
    # Descend into the sub-map to resolve the more specific child name.
    if len(pos_entry) == 3 and pos_key != pos_code:
        sub_map = pos_entry[2]
        logger.debug("Found parent part of speech name '{0}'. Descending to "
                     "look for child name for '{1}'".format(
                         pos_entry[1], pos_code))
        sub_pos = _get_pos_name(pos_code, names, english, sub_map)
        if names == 'all':
            pos = pos + sub_pos if sub_pos else pos
        else:
            pos = (sub_pos, )
    name = pos if names == 'all' else pos[-1]
    logger.debug("Part of speech name found: '{0}'".format(name))
    return name
Gets the part of speech name for *pos_code*.
373,682
def keys_present(name, number, save_dir, region=None, key=None, keyid=None,
                 profile=None, save_format="{2}\n{0}\n{3}\n{1}\n"):
    """Ensure the IAM user *name* has at least *number* access keys,
    creating any missing keys and appending them to a file named after
    the user inside *save_dir*.

    NOTE(review): every string literal in this state function had been
    stripped from the source (leaving invalid syntax).  The dict keys,
    ``__salt__`` function names and AWS response keys below are restored
    from SaltStack's ``boto_iam`` state module conventions; the exact
    comment-message wording is reconstructed and should be confirmed
    against upstream.
    """
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    # The IAM user must already exist; this state does not create users.
    if not __salt__['boto_iam.get_user'](name, region, key, keyid, profile):
        ret['result'] = False
        ret['comment'] = 'IAM user {0} does not exist.'.format(name)
        return ret
    if not isinstance(number, int):
        ret['comment'] = 'The number of keys must be an integer.'
        ret['result'] = False
        return ret
    if not os.path.isdir(save_dir):
        ret['comment'] = 'The directory {0} does not exist.'.format(save_dir)
        ret['result'] = False
        return ret
    keys = __salt__['boto_iam.get_all_access_keys'](user_name=name,
                                                    region=region, key=key,
                                                    keyid=keyid,
                                                    profile=profile)
    # boto_iam returns an error string on failure instead of a dict.
    if isinstance(keys, six.string_types):
        log.debug('Error listing access keys: %s', keys)
        error, message = _get_error(keys)
        ret['comment'] = 'Error: {0} - {1}'.format(error, message)
        ret['result'] = False
        return ret
    keys = keys['list_access_keys_response']['list_access_keys_result']['access_key_metadata']
    log.debug('Keys found: %s', keys)
    if len(keys) >= number:
        ret['comment'] = 'The requested number of keys exist for user {0}.'.format(name)
        ret['result'] = True
        return ret
    if __opts__['test']:
        ret['comment'] = 'Access keys are set to be created for {0}.'.format(name)
        ret['result'] = None
        return ret
    new_keys = {}
    # Create only as many keys as are missing.
    for i in range(number - len(keys)):
        created = __salt__['boto_iam.create_access_key'](name, region, key,
                                                         keyid, profile)
        if isinstance(created, six.string_types):
            error, message = _get_error(created)
            ret['comment'] = 'Error: {0} - {1}'.format(error, message)
            ret['result'] = False
            return ret
        log.debug('Created key: %s', created)
        response = 'create_access_key_response'
        result = 'create_access_key_result'
        new_keys[six.text_type(i)] = {}
        new_keys[six.text_type(i)]['key_id'] = created[response][result]['access_key']['access_key_id']
        new_keys[six.text_type(i)]['secret_key'] = created[response][result]['access_key']['secret_access_key']
    try:
        # Append the new keys to <save_dir>/<name> using save_format.
        with salt.utils.files.fopen('{0}/{1}'.format(save_dir, name), 'a') as _wrf:
            for key_num, key in new_keys.items():
                key_id = key['key_id']
                secret_key = key['secret_key']
                _wrf.write(salt.utils.stringutils.to_str(
                    save_format.format(
                        key_id,
                        secret_key,
                        'key_id-{0}'.format(key_num),
                        'key-{0}'.format(key_num)
                    )
                ))
        ret['comment'] = 'Keys have been written to file {0}/{1}.'.format(save_dir, name)
        ret['result'] = True
        ret['changes'] = new_keys
        return ret
    except IOError:
        ret['comment'] = 'Could not write to file {0}/{1}.'.format(save_dir, name)
        ret['result'] = False
        return ret
.. versionadded:: 2015.8.0

Ensure the IAM access keys are present.

name (string)
    The name of the new user.

number (int)
    Number of keys that user should have.

save_dir (string)
    The directory where the key/keys will be saved. Keys are saved to a file
    named according to the username provided.

region (string)
    Region to connect to.

key (string)
    Secret key to be used.

keyid (string)
    Access key to be used.

profile (dict)
    A dict with region, key and keyid, or a pillar key (string)
    that contains a dict with region, key and keyid.

save_format (dict)
    Save format is repeated for each key. Default format is
    "{2}\\n{0}\\n{3}\\n{1}\\n", where {0} and {1} are placeholders for new
    key_id and key respectively, whereas {2} and {3} are "key_id-{number}"
    and 'key-{number}' strings kept for compatibility.
373,683
def get_installed_distributions(local_only=True,
                                skip=('setuptools', 'pip', 'python')):
    """Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to ('setuptools', 'pip', 'python').
    """
    # NOTE(review): the skip default had been stripped to an invalid
    # empty-slot tuple; restored from the defaults stated in the
    # docstring above.
    if local_only:
        local_test = dist_is_local
    else:
        def local_test(d):
            # No virtualenv restriction: accept every distribution.
            return True
    return [d for d in pkg_resources.working_set
            if local_test(d) and d.key not in skip]
Return a list of installed Distribution objects. If ``local_only`` is True (default), only return installations local to the current virtualenv, if in a virtualenv. ``skip`` argument is an iterable of lower-case project names to ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also skip virtualenv?]
373,684
def draw_points_heatmap_array(self, image_shape, alpha=1.0, size=1,
                              raise_if_out_of_image=False):
    """Draw the points of the line string as a float heatmap array.

    Parameters
    ----------
    image_shape : tuple of int
        Shape of the target image; either ``(H,W)`` or ``(H,W,1)``.
    alpha : float, optional
        Opacity of the drawn points (higher = more visible).
    size : int, optional
        Point size in pixels.
    raise_if_out_of_image : bool, optional
        If True, raise when the line string lies fully outside the
        image; otherwise only the parts inside the image are drawn.

    Returns
    -------
    ndarray
        Float array of shape `image_shape` (no channel axis) with the
        drawn points, values in ``[0.0, 1.0]``.
    """
    shape_ok = (
        len(image_shape) == 2
        or (len(image_shape) == 3 and image_shape[-1] == 1))
    assert shape_ok, (
        "Expected (H,W) or (H,W,1) as image_shape, got %s." % (
            image_shape,))

    # Render onto a black uint8 canvas, then normalize to [0, 1] floats.
    canvas = np.zeros(image_shape, dtype=np.uint8)
    drawn = self.draw_points_on_image(
        canvas,
        color=255,
        alpha=alpha,
        size=size,
        raise_if_out_of_image=raise_if_out_of_image
    )
    return drawn.astype(np.float32) / 255.0
Draw the points of the line string as a heatmap array. Parameters ---------- image_shape : tuple of int The shape of the image onto which to draw the point mask. alpha : float, optional Opacity of the line string points. Higher values denote a more visible points. size : int, optional Size of the points in pixels. raise_if_out_of_image : bool, optional Whether to raise an error if the line string is fully outside of the image. If set to False, no error will be raised and only the parts inside the image will be drawn. Returns ------- ndarray Float array of shape `image_shape` (no channel axis) with drawn line string points. All values are in the interval ``[0.0, 1.0]``.
373,685
def validate_args(f):
    """Ensure that the ``args`` argument consists of a single consistent type.

    The wrapped callable must accept ``(self, args)`` where ``args`` is an
    iterable.  Validation rules:

    * all elements must share one type (no mixing),
    * that type must be ``dict`` or ``str``.

    An empty ``args`` collection passes validation — the previous
    implementation crashed with an unintended ``IndexError`` on
    ``list(arg_types)[0]`` when ``args`` was empty.

    :param f: any client method with an ``args`` parameter
    :raises TypeError: on mixed or unsupported element types
    :return: the wrapping function
    """
    def wrapper(self, args):
        arg_types = {type(arg) for arg in args}
        if len(arg_types) > 1:
            raise TypeError("Mixed input types are not allowed")
        # Empty args: nothing to type-check, pass straight through.
        if arg_types and arg_types.pop() not in (dict, str):
            raise TypeError("Only dict and str types accepted")
        return f(self, args)
    return wrapper
Ensures that *args consist of a consistent type :param f: any client method with *args parameter :return: function f
373,686
def get_tmaster(self, topologyName, callback=None):
    """Get the tmaster of a topology.

    With *callback* supplied, the lookup is asynchronous (a watch is
    kept and the callback receives the data); the return value is then
    ``None``.  Without a callback, the result is captured synchronously
    and returned.
    """
    ret = {"result": None}
    if callback:
        isWatching = True
    else:
        isWatching = False

        # Synchronous mode: capture the data through a local callback.
        def callback(data):
            ret["result"] = data

    self._get_tmaster_with_watch(topologyName, callback, isWatching)
    return ret["result"]
get tmaster
373,687
def to_primitive(self, value, context=None):
    """Schematics serializer override.

    If the context requests epoch output, convert the
    `datetime.datetime` *value* into an epoch `int`; a second context
    flag returns the value untouched; otherwise defer to the parent
    type's serializer.

    NOTE(review): the key arguments of both ``context.get()`` calls were
    stripped from this source — per the accompanying docstring the first
    is presumably 'epoch_date'; confirm both against upstream before use.
    """
    if context and context.get():
        # Seconds since the Unix epoch, truncated to int.
        epoch = dt(1970, 1, 1)
        value = (value - epoch).total_seconds()
        return int(value)
    elif context and context.get():
        # Pass the datetime through unmodified.
        return value
    else:
        return super(Type, self).to_primitive(value, context)
Schematics serializer override If epoch_date is true then convert the `datetime.datetime` object into an epoch `int`.
373,688
def configure(self):
    """Configure the device.

    Send the device configuration byte saved inside this MCP342x object
    to the target device over the I2C bus.
    """
    # NOTE(review): the debug-message literals had been stripped from the
    # source (leaving invalid syntax); the text below is a reconstruction
    # preserving the original sequence of logged values.
    logger.debug('configure: address=' + hex(self.get_address())
                 + ' channel=' + str(self.get_channel())
                 + ' resolution=' + str(self.get_resolution())
                 + ' gain=' + str(self.get_gain()))
    self.bus.write_byte(self.address, self.config)
Configure the device. Send the device configuration saved inside the MCP342x object to the target device.
373,689
def get_id(date=None, project: str = 'sip', instance_id: int = None) -> str:
    """Get an SBI Identifier.

    NOTE(review): the literals (default project name, ``strftime`` format
    and the ID template) had been stripped from the source; they are
    reconstructed here and should be confirmed against upstream.

    Args:
        date (str or datetime.datetime, optional): UTC date of the SBI
        project (str, optional): Project name
        instance_id (int, optional): SBI instance identifier

    Returns:
        str, Scheduling Block Instance (SBI) ID.
    """
    if date is None:
        date = datetime.datetime.utcnow()
    if isinstance(date, datetime.datetime):
        date = date.strftime('%Y%m%d')
    if instance_id is None:
        # Random 4-digit instance id when none is supplied.
        instance_id = randint(0, 9999)
    return 'SBI-{}-{}-{:04d}'.format(date, project, instance_id)
Get an SBI Identifier.

Args:
    date (str or datetime.datetime, optional): UTC date of the SBI
    project (str, optional): Project name
    instance_id (int, optional): SBI instance identifier

Returns:
    str, Scheduling Block Instance (SBI) ID.
373,690
def evaluate(
    loop_hparams, planner_hparams, policy_dir, model_dir, eval_metrics_dir,
    agent_type, eval_mode, eval_with_learner, log_every_steps,
    debug_video_path, num_debug_videos=1, random_starts_step_limit=None,
    report_fn=None, report_metric=None
):
  """Evaluate an agent or a world model, write metrics, return them.

  Dispatches on *eval_mode*: "agent_real"/"agent_simulated" evaluate the
  agent (optionally via a custom eval_fn with debug-video recording);
  any other mode evaluates the world model.  Metrics are summarized to
  *eval_metrics_dir* and optionally reported through *report_fn*.
  """
  # The learner path only supports plain policy agents.
  if eval_with_learner:
    assert agent_type == "policy"
  if report_fn:
    assert report_metric is not None

  eval_metrics_writer = tf.summary.FileWriter(eval_metrics_dir)
  video_writers = ()
  kwargs = {}
  if eval_mode in ["agent_real", "agent_simulated"]:
    if not eval_with_learner:
      # Record one debug video per requested rollout, if a path is given.
      if debug_video_path:
        tf.gfile.MakeDirs(debug_video_path)
        video_writers = [
            common_video.WholeVideoWriter(
                fps=10,
                output_path=os.path.join(debug_video_path,
                                         "{}.avi".format(i)),
                file_format="avi",
            )
            for i in range(num_debug_videos)
        ]
      kwargs["eval_fn"] = make_eval_fn_with_agent(
          agent_type, eval_mode, planner_hparams, model_dir,
          log_every_steps=log_every_steps,
          video_writers=video_writers,
          random_starts_step_limit=random_starts_step_limit
      )
    eval_metrics = rl_utils.evaluate_all_configs(
        loop_hparams, policy_dir, **kwargs
    )
  else:
    eval_metrics = evaluate_world_model(
        agent_type, loop_hparams, planner_hparams, model_dir, policy_dir,
        random_starts_step_limit, debug_video_path, log_every_steps
    )
  rl_utils.summarize_metrics(eval_metrics_writer, eval_metrics, 0)

  # Flush any recorded debug videos to disk.
  for video_writer in video_writers:
    video_writer.finish_to_disk()

  # Report metrics, e.g. to a hyperparameter tuner.
  if report_fn:
    if report_metric == "mean_reward":
      metric_name = rl_utils.get_metric_name(
          sampling_temp=loop_hparams.eval_sampling_temps[0],
          max_num_noops=loop_hparams.eval_max_num_noops,
          clipped=False
      )
      report_fn(eval_metrics[metric_name], 0)
    else:
      report_fn(eval_metrics[report_metric], 0)
  return eval_metrics
Evaluate.
373,691
def get_word_id(root):
    """Look up — or assign on first sight — the unique integer id for a
    word root.

    Ids are dense: a new root receives the current table size as its id.
    """
    global UNIQ_WORDS
    return UNIQ_WORDS.setdefault(root, len(UNIQ_WORDS))
Look up or assign a unique identifier for each word root.
373,692
def user_has_permission(self, user, name):
    """Verify that *user* has permission *name* via any of their groups."""
    membership = AuthMembership.objects(creator=self.client,
                                        user=user).first()
    # No membership record means no permissions at all.
    if not membership:
        return False
    return any(self.has_permission(group.role, name)
               for group in membership.groups)
verify user has permission
373,693
def _handle_union(self, node, scope, ctxt, stream):
    """Handle a union declaration node by building its class.

    :node: the AST union node
    :scope: current scope (unused here)
    :ctxt: current context (unused here)
    :stream: input stream (unused here)
    :returns: a ``StructUnionDef`` describing the union
    """
    self._dlog("handling union")
    return StructUnionDef("union", self, node)
TODO: Docstring for _handle_union. :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO
373,694
def generate_all(sumlevel, d):
    """Generate a dict with all available geoid values (``gvid``,
    ``geoid``, ``geoidt``) for the given summary level, built from the
    component values in *d*.

    NOTE(review): the key literals in the three ``if in d:`` blocks were
    stripped from this source — each block originally renamed one
    alternate input key to the canonical name expected by the geoid
    constructors.  Restore them from upstream before use; the code as-is
    is not syntactically valid.
    """
    from geoid.civick import GVid
    from geoid.tiger import TigerGeoid
    from geoid.acs import AcsGeoid

    sumlevel = int(sumlevel)
    # Work on a copy so the caller's dict is not mutated.
    d = dict(d.items())

    if in d:
        d[] = d[]
        del d[]
    if in d:
        d[] = d[]
        del d[]
    if in d:
        d[] = d[]
        del d[]

    gvid_class = GVid.resolve_summary_level(sumlevel)

    # Unknown summary level: nothing can be generated.
    if not gvid_class:
        return {}

    geoidt_class = TigerGeoid.resolve_summary_level(sumlevel)
    geoid_class = AcsGeoid.resolve_summary_level(sumlevel)

    try:
        return dict(
            gvid=str(gvid_class(**d)),
            geoid=str(geoid_class(**d)),
            geoidt=str(geoidt_class(**d))
        )
    except:
        raise
Generate a dict that includes all of the available geoid values, with keys for the most common names for those values.
373,695
def plate_exchanger_identifier(self):
    """Method to create an identifying string in format 'L' + wavelength
    + 'A' + amplitude + 'B' + chevron angle-chevron angle.

    Wavelength and amplitude are specified in units of mm and rounded to
    two decimal places (the stored attributes are in meters).
    """
    # NOTE(review): the string literals here had been stripped (leaving
    # invalid syntax); restored from the format documented above.
    s = ('L' + str(round(self.wavelength*1000, 2))
         + 'A' + str(round(self.amplitude*1000, 2))
         + 'B' + '-'.join([str(i) for i in self.chevron_angles]))
    return s
Method to create an identifying string in format 'L' + wavelength + 'A' + amplitude + 'B' + chevron angle-chevron angle. Wavelength and amplitude are specified in units of mm and rounded to two decimal places.
373,696
def _process_wave_param(self, pval):
    """Process an individual model parameter representing wavelength,
    converting it to the internal wavelength unit using spectral
    equivalencies."""
    equivs = u.spectral()
    return self._process_generic_param(
        pval, self._internal_wave_unit, equivalencies=equivs)
Process individual model parameter representing wavelength.
373,697
def safe_int_conv(number):
    """Safely convert a single number to integer.

    Raises:
        ValueError: if *number* cannot be cast to an integer under
            NumPy's 'safe' casting rule (e.g. any float, even an
            integral one).
    """
    # NOTE(review): the literals had been stripped (invalid syntax);
    # restored with 'safe' casting and a descriptive error message.
    try:
        return int(np.array(number).astype(int, casting='safe'))
    except TypeError:
        raise ValueError('cannot safely convert {} to integer'
                         .format(number))
Safely convert a single number to integer.
373,698
def is_for_driver_task(self):
    """See whether this function descriptor is for a driver or not.

    Returns:
        True if this function descriptor is for driver tasks (i.e. the
        module, class and function names are all empty).
    """
    descriptor_fields = (self.module_name, self.class_name,
                         self.function_name)
    return all(len(field) == 0 for field in descriptor_fields)
See whether this function descriptor is for a driver or not. Returns: True if this function descriptor is for driver tasks.
373,699
def list(self, **filters):
    """Return a queryset of objects the user has permission to view.

    NOTE(review): several string literals were stripped from this
    source: the first ``LOG.debug`` format string, the permission action
    passed to ``build_permission_name`` (presumably 'view'), and the
    attribute name read via ``getattr`` (presumably 'select_related') —
    confirm against upstream.  The ``filters`` kwargs are only logged in
    the visible code; their use may have been in a stripped literal.
    """
    LOG.debug(u, self.model_class.__name__, filters)
    query = self.__queryset__()
    perm = build_permission_name(self.model_class, )
    LOG.debug(u"Checking if user %s has_perm %s" % (self.user, perm))
    # Keep only objects the user may access, then re-query by pk so the
    # result is a real queryset rather than a filtered iterator.
    query_with_permission = filter(lambda o: self.user.has_perm(perm, obj=o), query)
    ids = map(lambda o: o.pk, query_with_permission)
    queryset = self.__queryset__().filter(pk__in=ids)
    related = getattr(self, , None)
    if related:
        queryset = queryset.select_related(*related)
    return queryset
Returns a queryset filtering object by user permission. If you want, you can specify filter arguments. See https://docs.djangoproject.com/en/dev/ref/models/querysets/#filter for more details