Dataset columns: positive (string, lengths 100 to 30.3k); anchor (string, lengths 1 to 15k)
def request(self, endpoint, method='GET', params=None, version='1.1', json_encoded=False): """Return dict of response received from Twitter's API :param endpoint: (required) Full url or Twitter API endpoint (e.g. search/tweets) :type endpoint: string :param meth...
Return dict of response received from Twitter's API :param endpoint: (required) Full url or Twitter API endpoint (e.g. search/tweets) :type endpoint: string :param method: (optional) Method of accessing data, either GET, POST or DELETE. (default G...
def _check_layer_count(self, layer): """Check for the validity of the layer. :param layer: QGIS layer :type layer: qgis.core.QgsVectorLayer :return: """ if layer: if not layer.isValid(): raise ImpactReport.LayerException('Layer is not valid') ...
Check for the validity of the layer. :param layer: QGIS layer :type layer: qgis.core.QgsVectorLayer :return:
def get_eager_datasource(cls, session, datasource_type, datasource_id): """Returns datasource with columns and metrics.""" datasource_class = ConnectorRegistry.sources[datasource_type] return ( session.query(datasource_class) .options( subqueryload(datasou...
Returns datasource with columns and metrics.
def pythonvaluetotime(time_val): "Convert a time or time range from Python datetime to ArcGIS REST server" if time_val is None: return None elif isinstance(time_val, numeric): return str(long(time_val * 1000.0)) elif isinstance(time_val, date): dtlist = [time_val.year, time_val.m...
Convert a time or time range from Python datetime to ArcGIS REST server
def attr_names(cls) -> List[str]: """ Returns annotated attribute names :return: List[str] """ return [k for k, v in cls.attr_types().items()]
Returns annotated attribute names :return: List[str]
def generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=2**np.arange(3, 6)): """ Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window. """ base_anchor = np.array([1, 1, base_size, base_size], dtype='float32') - 1 ...
Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window.
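The row above cuts off before the enumeration itself, so here is a minimal, self-contained sketch of the same ratios-times-scales idea around a reference window; the function name and exact rounding are illustrative assumptions, not taken from the snippet.

import numpy as np

def enumerate_anchors(base_size=16, ratios=(0.5, 1, 2), scales=2 ** np.arange(3, 6)):
    # Centre of the (0, 0, base_size-1, base_size-1) reference window.
    cx = cy = (base_size - 1) / 2.0
    anchors = []
    for ratio in ratios:
        # Keep the area roughly constant while changing the aspect ratio.
        w = np.round(np.sqrt(base_size * base_size / ratio))
        h = np.round(w * ratio)
        for scale in scales:
            ws, hs = w * scale, h * scale
            anchors.append([cx - (ws - 1) / 2.0, cy - (hs - 1) / 2.0,
                            cx + (ws - 1) / 2.0, cy + (hs - 1) / 2.0])
    return np.array(anchors)

print(enumerate_anchors().shape)  # 3 ratios x 3 scales -> (9, 4)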
def run_matrix_ordered(self, process_data): """ Running pipelines one after the other. Returns a dict with two fields: success True/False and captured output (list of str). """ output = [] for entry in self.matrix: env = entry['env'].copy() ...
Running pipelines one after the other. Returns a dict with two fields: success True/False and captured output (list of str).
def coerce_value( value: Any, type_: GraphQLInputType, blame_node: Node = None, path: Path = None ) -> CoercedValue: """Coerce a Python value given a GraphQL Type. Returns either a value which is valid for the provided type or a list of encountered coercion errors. """ # A value must be provide...
Coerce a Python value given a GraphQL Type. Returns either a value which is valid for the provided type or a list of encountered coercion errors.
def _GetLoadConfigTimestamp(self, pefile_object): """Retrieves the timestamp from the Load Configuration directory. Args: pefile_object (pefile.PE): pefile object. Returns: int: load configuration timestamps or None if there are none present. """ if not hasattr(pefile_object, 'DIRECTOR...
Retrieves the timestamp from the Load Configuration directory. Args: pefile_object (pefile.PE): pefile object. Returns: int: load configuration timestamps or None if there are none present.
def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs): ''' a helper method for other storage clients to import into appdata :param record_key: string with key for record :param record_data: byte data for body of record :...
a helper method for other storage clients to import into appdata :param record_key: string with key for record :param record_data: byte data for body of record :param overwrite: [optional] boolean to overwrite existing records :param last_modified: [optional] float to record...
def setPermanences(self, segments, presynapticCellsBySource, permanence): """ Set the permanence of a specific set of synapses. Any synapses that don't exist will be initialized. Any existing synapses will be overwritten. Conceptually, this method takes a list of [segment, presynapticCell] pairs an...
Set the permanence of a specific set of synapses. Any synapses that don't exist will be initialized. Any existing synapses will be overwritten. Conceptually, this method takes a list of [segment, presynapticCell] pairs and initializes their permanence. For each segment, one synapse is added (although o...
def update_selection_sm_prior_condition(self, state_row_iter, selected_model_list, sm_selected_model_list): """State machine prior update of tree selection for one tree model row""" selected_path = self.tree_store.get_path(state_row_iter) tree_model_row = self.tree_store[selected_path] m...
State machine prior update of tree selection for one tree model row
def ip2hex(ip): ''' Converts an ip to a hex value that can be used with a hex bit mask ''' parts = ip.split(".") if len(parts) != 4: return None ipv = 0 for part in parts: try: p = int(part) if p < 0 or p > 255: return None ipv = (ipv << 8) + p ...
Converts an ip to a hex value that can be used with a hex bit mask
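The snippet above is truncated before its return; a minimal sketch of the same shift-and-add conversion (hypothetical name, same validation rules) is:

def ip_to_int(ip):
    # Pack four dotted-quad octets into one 32-bit integer; return None on bad input.
    parts = ip.split(".")
    if len(parts) != 4:
        return None
    value = 0
    for part in parts:
        try:
            octet = int(part)
        except ValueError:
            return None
        if octet < 0 or octet > 255:
            return None
        value = (value << 8) + octet
    return value

print(hex(ip_to_int("192.168.1.1")))  # 0xc0a80101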
def compute_factorial(n): """ computes factorial of n """ sleep_walk(10) ret = 1 for i in range(n): ret = ret * (i + 1) return ret
computes factorial of n
def thread_info(self): """Return a string identifying the current thread and fiber.""" tid = threading.current_thread().name if tid == 'MainThread': tid = 'Main' current = fibers.current() fid = getattr(current, 'name') if current.parent else 'Root' return '{}...
Return a string identifying the current thread and fiber.
def difference_update(self, values): '''Remove an iterable of *values* from the set.''' d = self.value_pickler.dumps return self.cache.remove(tuple((d(v) for v in values)))
Remove an iterable of *values* from the set.
def create_country(cls, country, **kwargs): """Create Country Create a new Country This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_country(country, async=True) >>> result = thre...
Create Country Create a new Country This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_country(country, async=True) >>> result = thread.get() :param async bool :param Coun...
def get_objective_form_for_create(self, objective_record_types=None): """Gets the objective form for creating new objectives. A new form should be requested for each create transaction. arg: objectiveRecordTypes (osid.type.Type): array of objective record types return:...
Gets the objective form for creating new objectives. A new form should be requested for each create transaction. arg: objectiveRecordTypes (osid.type.Type): array of objective record types return: (osid.learning.ObjectiveForm) - the objective form raise: NullArgument ...
def _file_nums_str(self, n_all, n_type, n_ign): """Construct a string showing the number of different file types. Returns ------- f_str : str """ # 'other' is the difference between all and named n_oth = n_all - np.sum(n_type) f_str = "{} Files".format(n...
Construct a string showing the number of different file types. Returns ------- f_str : str
def kinks(path, tol=1e-8): """returns indices of segments that start on a non-differentiable joint.""" kink_list = [] for idx in range(len(path)): if idx == 0 and not path.isclosed(): continue try: u = path[(idx - 1) % len(path)].unit_tangent(1) v = path[i...
returns indices of segments that start on a non-differentiable joint.
def resolveWithMib(self, mibViewController): """Perform MIB variable ID and associated value conversion. Parameters ---------- mibViewController : :py:class:`~pysnmp.smi.view.MibViewController` class instance representing MIB browsing functionality. Returns ...
Perform MIB variable ID and associated value conversion. Parameters ---------- mibViewController : :py:class:`~pysnmp.smi.view.MibViewController` class instance representing MIB browsing functionality. Returns ------- : :py:class:`~pysnmp.smi.rfc1902.ObjectT...
def get_biopython_pepstats(self, clean_seq=False): """Run Biopython's built in ProteinAnalysis module and store statistics in the ``annotations`` attribute.""" if self.seq: if clean_seq: # TODO: can make this a property of the SeqProp class seq = self.seq_str.replace('X', '...
Run Biopython's built in ProteinAnalysis module and store statistics in the ``annotations`` attribute.
def query(self, variables, evidence=None, elimination_order=None, joint=True): """ Parameters ---------- variables: list list of variables for which you want to compute the probability evidence: dict a dict key, value pair as {var: state_of_var_observed} ...
Parameters ---------- variables: list list of variables for which you want to compute the probability evidence: dict a dict key, value pair as {var: state_of_var_observed} None if no evidence elimination_order: list order of variable elim...
def union(a, b): """Assert equality of two nodes a and b so find(a) is find(b).""" a = find(a) b = find(b) if a is not b: if a.rank < b.rank: a.parent = b elif b.rank < a.rank: b.parent = a else: b.parent = a a.rank += 1
Assert equality of two nodes a and b so find(a) is find(b).
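The union above relies on a companion find() with union by rank; a self-contained disjoint-set sketch (the Node class here is a hypothetical stand-in, not from the source) reads:

class Node:
    def __init__(self):
        self.parent = self
        self.rank = 0

def find(node):
    # Walk up to the representative; no path compression, to stay close to the snippet.
    while node.parent is not node:
        node = node.parent
    return node

def union(a, b):
    a, b = find(a), find(b)
    if a is not b:
        if a.rank < b.rank:
            a.parent = b
        elif b.rank < a.rank:
            b.parent = a
        else:
            b.parent = a
            a.rank += 1

x, y = Node(), Node()
union(x, y)
print(find(x) is find(y))  # True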
def branchScale(self): """See docs for `Model` abstract base class.""" bscales = [m.branchScale for m in self._models] return (self.catweights * bscales).sum()
See docs for `Model` abstract base class.
def bpf_select(fds_list, timeout=None): """A call to recv() can return several frames. This function hides the fact that some frames are read from the internal buffer.""" # Check file descriptor types bpf_scks_buffered = list() select_fds = list() for tmp_fd in fds_list: # Specif...
A call to recv() can return several frames. This function hides the fact that some frames are read from the internal buffer.
def initialize_service(service, operation, api_protocol): """create lib and test dirs if they do not exist """ lib_dir = get_lib_dir(service) test_dir = get_test_dir(service) print_progress('Initializing service', service, 'green') client = boto3.client(service) service_class = client.__class__.__...
create lib and test dirs if they do not exist
def get_recent_linkbacks(number=5, template='zinnia/tags/linkbacks_recent.html'): """ Return the most recent linkbacks. """ entry_published_pks = map(smart_text, Entry.published.values_list('id', flat=True)) content_type = ContentType.objects.ge...
Return the most recent linkbacks.
def _build_headers(self, method, auth_session): """Create headers for the request. Parameters method (str) HTTP method (e.g. 'POST'). auth_session (Session) The Session object containing OAuth 2.0 credentials. Returns headers (d...
Create headers for the request. Parameters method (str) HTTP method (e.g. 'POST'). auth_session (Session) The Session object containing OAuth 2.0 credentials. Returns headers (dict) Dictionary of access headers to attach...
def make_if_statement(instr, queue, stack, context): """ Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE. """ test_expr = make_expr(stack) if isinstance(instr, instrs.POP_JUMP_IF_TRUE): test_expr = ast.UnaryOp(op=ast.Not(), operand=test_expr) first_block = popwhile(op....
Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE.
def get_N50(readlengths): """Calculate read length N50. Based on https://github.com/PapenfussLab/Mungo/blob/master/bin/fasta_stats.py """ return readlengths[np.where(np.cumsum(readlengths) >= 0.5 * np.sum(readlengths))[0][0]]
Calculate read length N50. Based on https://github.com/PapenfussLab/Mungo/blob/master/bin/fasta_stats.py
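The one-liner above presumably expects the lengths to arrive in a suitable order; as a self-contained illustration of the same cumulative-sum definition (sorting descending first), a sketch might look like:

import numpy as np

def n50(read_lengths):
    # Sort descending, then return the length at which the cumulative sum
    # first reaches half of the total number of bases.
    lengths = np.sort(np.asarray(read_lengths))[::-1]
    csum = np.cumsum(lengths)
    return lengths[np.searchsorted(csum, 0.5 * csum[-1])]

print(n50([2, 2, 2, 3, 3, 4, 8, 8]))  # 8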
def getNodeDegrees(grph, weightString = "weight", strictMode = False, returnType = int, edgeType = 'bi'): """ Returns a dictionary of nodes to their degrees; the degree is determined by adding the weight of each edge, with the weight being the string weightString that gives the name of the attribute of each edge con...
Returns a dictionary of nodes to their degrees; the degree is determined by adding the weight of each edge, with the weight being the string weightString that gives the name of the attribute of each edge containing their weight. The weights are then converted to the type returnType. If weightString is given as False instead ea...
def extract_objects_from_source(self, text, type_filter=None): '''Extract object declarations from a text buffer Args: text (str): Source code to parse type_filter (class, optional): Object class to filter results Returns: List of parsed objects. ''' objects = parse_verilog(text) ...
Extract object declarations from a text buffer Args: text (str): Source code to parse type_filter (class, optional): Object class to filter results Returns: List of parsed objects.
def import_new_atlas_pointings( self, recent=False): """ *Import any new ATLAS pointings from the atlas3/atlas4 databases into the ``atlas_exposures`` table of the Atlas Movers database* **Key Arguments:** - ``recent`` -- only sync the most recent 2 weeks of ...
*Import any new ATLAS pointings from the atlas3/atlas4 databases into the ``atlas_exposures`` table of the Atlas Movers database* **Key Arguments:** - ``recent`` -- only sync the most recent 2 weeks of data (speeds things up) **Return:** - None **Usage:** ...
def OnInsertCols(self, event): """Inserts the maximum of 1 and the number of selected columns""" bbox = self.grid.selection.get_bbox() if bbox is None or bbox[1][1] is None: # Insert rows at cursor ins_point = self.grid.actions.cursor[1] - 1 no_cols = 1 ...
Inserts the maximum of 1 and the number of selected columns
def hash(path, hash_function=hashlib.sha512): # @ReservedAssignment ''' Hash file or directory. Parameters ---------- path : ~pathlib.Path File or directory to hash. hash_function : ~typing.Callable[[], hash object] Function which creates a hashlib hash object when called. Defa...
Hash file or directory. Parameters ---------- path : ~pathlib.Path File or directory to hash. hash_function : ~typing.Callable[[], hash object] Function which creates a hashlib hash object when called. Defaults to ``hashlib.sha512``. Returns ------- hash object ...
def update_ase(db_file, identity, stdout, **key_value_pairs): """Connect to ASE db""" db_ase = ase.db.connect(db_file) _normalize_key_value_pairs_inplace(key_value_pairs) count = db_ase.update(identity, **key_value_pairs) stdout.write(' Updating {0} key value pairs in ASE db row id = {1}\n' ...
Connect to ASE db
def get_parent(brain_or_object, catalog_search=False): """Locate the parent object of the content/catalog brain. The `catalog_search` switch uses the `portal_catalog` to do a search and return a brain instead of the full parent object. However, if the search returned no results, it falls back to returning the ...
Locate the parent object of the content/catalog brain. The `catalog_search` switch uses the `portal_catalog` to do a search and return a brain instead of the full parent object. However, if the search returned no results, it falls back to returning the full parent object. :param brain_or_object: A single cata...
def _discard_config(self): """Set candidate_cfg to current running-config. Erase the merge_cfg file.""" discard_candidate = "copy running-config {}".format( self._gen_full_path(self.candidate_cfg) ) discard_merge = "copy null: {}".format(self._gen_full_path(self.merge_cfg)) ...
Set candidate_cfg to current running-config. Erase the merge_cfg file.
def parse_instruction(string, location, tokens): """Parse an x86 instruction. """ prefix_str = tokens.get("prefix", None) mnemonic_str = tokens.get("mnemonic") operands = [op for op in tokens.get("operands", [])] infer_operands_size(operands) # Quick hack: Capstone returns rep instead of r...
Parse an x86 instruction.
def send_message(self, text: str, reply: int=None, link_preview: bool=None, on_success: callable=None, reply_markup: botapi.ReplyMarkup=None): """ Send message to this peer. :param text: Text to send. :param reply: Message object or message_id to reply to. :p...
Send message to this peer. :param text: Text to send. :param reply: Message object or message_id to reply to. :param link_preview: Whether or not to show the link preview for this message :param on_success: Callback to call when call is complete. :type reply: int or Message
def _parse_log_entry(entry_pb): """Special helper to parse ``LogEntry`` protobuf into a dictionary. The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This can be problematic if the type URL in the payload isn't in the ``google.protobuf`` registry. To help with parsing unregistered types, ...
Special helper to parse ``LogEntry`` protobuf into a dictionary. The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This can be problematic if the type URL in the payload isn't in the ``google.protobuf`` registry. To help with parsing unregistered types, this function will remove ``proto_p...
def append_items(self, items, **kwargs): """ Method to append data to multiple :class:`~.Item` objects. This method differs from the normal :meth:`append_multi` in that each `Item`'s `value` field is updated with the appended data upon successful completion of the operation. ...
Method to append data to multiple :class:`~.Item` objects. This method differs from the normal :meth:`append_multi` in that each `Item`'s `value` field is updated with the appended data upon successful completion of the operation. :param items: The item dictionary. The value for each ...
def _lincomb(self, a, x1, b, x2, out): """Raw linear combination.""" self.tspace._lincomb(a, x1.tensor, b, x2.tensor, out.tensor)
Raw linear combination.
def get_type_item(self, value): """ Converts the input to a ``SharedVolume`` or ``HostVolume`` tuple for a host bind. Input can be a single string, a list or tuple, or a single-entry dictionary. Single values are assumed to be volume aliases for read-write access. Tuples or lists with tw...
Converts the input to a ``SharedVolume`` or ``HostVolume`` tuple for a host bind. Input can be a single string, a list or tuple, or a single-entry dictionary. Single values are assumed to be volume aliases for read-write access. Tuples or lists with two elements can be ``(alias, read-only indic...
def from_dict(d): """Transform the dict to a DateRange object.""" start = d.get('start') end = d.get('end') if not (start and end): raise ValueError('DateRange must have both start and end') start = str_to_date(start) end = str_to_date(end) ret...
Transform the dict to a DateRange object.
def display_video_hooks(hook_args): """Hooks to display videos at decode time.""" predictions = hook_args.predictions max_outputs = hook_args.decode_hparams.max_display_outputs max_decodes = hook_args.decode_hparams.max_display_decodes with tf.Graph().as_default(): _, best_decodes = video_metrics.compute...
Hooks to display videos at decode time.
def parse_track_header(self, fp): """Return the size of the track chunk.""" # Check the header try: h = fp.read(4) self.bytes_read += 4 except: raise IOError("Couldn't read track header from file. Byte %d." % self.bytes_read) ...
Return the size of the track chunk.
def has_code( state, text, incorrect_msg="Check the {ast_path}. The checker expected to find {text}.", fixed=False, ): """Test whether the student code contains text. Args: state: State instance describing student and solution code. Can be omitted if used with Ex(). text : text ...
Test whether the student code contains text. Args: state: State instance describing student and solution code. Can be omitted if used with Ex(). text : text that student code must contain. Can be a regex pattern or a simple string. incorrect_msg: feedback message if text is not in student c...
def gmdaArray(arry, dtype, mask=None, numGhosts=1): """ ghosted distributed array constructor @param arry numpy-like array @param numGhosts the number of ghosts (>= 0) """ a = numpy.array(arry, dtype) res = GhostedMaskedDistArray(a.shape, a.dtype) res.mask = mask res.setNumberOfGhost...
ghosted distributed array constructor @param arry numpy-like array @param numGhosts the number of ghosts (>= 0)
def abut (source,*args): """ Like the |Stat abut command. It concatenates two lists side-by-side and returns the result. '2D' lists are also accommodated for either argument (source or addon). CAUTION: If one list is shorter, it will be repeated until it is as long as the longest list. If this behavior is not d...
Like the |Stat abut command. It concatenates two lists side-by-side and returns the result. '2D' lists are also accommodated for either argument (source or addon). CAUTION: If one list is shorter, it will be repeated until it is as long as the longest list. If this behavior is not desired, use pstat.simpleabut(). ...
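A minimal sketch of the side-by-side concatenation described above, recycling the shorter list as the CAUTION warns (names here are illustrative, not the pstat implementation):

def abut_lists(source, addon):
    # Pair items side-by-side; the shorter list repeats until it matches the longer one.
    n = max(len(source), len(addon))
    return [[source[i % len(source)], addon[i % len(addon)]] for i in range(n)]

print(abut_lists([1, 2, 3, 4], ['a', 'b']))
# [[1, 'a'], [2, 'b'], [3, 'a'], [4, 'b']]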
def as_date(dat): """Return the RFC3339 UTC string representation of the given date and time. Args: dat (:py:class:`datetime.date`): the object/type to be serialized. Raises: TypeError: when ``o`` is not an instance of ``datetime.date``. Returns: (str) JSON seriali...
Return the RFC3339 UTC string representation of the given date and time. Args: dat (:py:class:`datetime.date`): the object/type to be serialized. Raises: TypeError: when ``o`` is not an instance of ``datetime.date``. Returns: (str) JSON serializable type for the given ...
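A self-contained sketch of serializing a date as an RFC 3339 UTC string (standard library only; the function name is a hypothetical stand-in for the method above):

from datetime import date, datetime, timezone

def as_rfc3339(d):
    # Serialize a datetime.date as an RFC 3339 UTC timestamp; reject other types.
    if not isinstance(d, date):
        raise TypeError("expected datetime.date")
    return datetime(d.year, d.month, d.day, tzinfo=timezone.utc).isoformat()

print(as_rfc3339(date(2020, 1, 2)))  # 2020-01-02T00:00:00+00:00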
def find(self, gridpos, find_string, flags, search_result=True): """Return next position of event_find_string in MainGrid Parameters: ----------- gridpos: 3-tuple of Integer \tPosition at which the search starts find_string: String \tString to find in grid ...
Return next position of event_find_string in MainGrid Parameters: ----------- gridpos: 3-tuple of Integer \tPosition at which the search starts find_string: String \tString to find in grid flags: List of strings \tSearch flag out of \t["UP" xor "D...
def _init_map(self): """stub""" SimpleDifficultyItemFormRecord._init_map(self) SourceItemFormRecord._init_map(self) PDFPreviewFormRecord._init_map(self) PublishedFormRecord._init_map(self) ProvenanceFormRecord._init_map(self) super(MecQBankBaseMixin, self)._init_m...
stub
def link_reads(self, analysistype): """ Create folders with relative symlinks to the desired simulated/sampled reads. These folders will contain all the reads created for each sample, and will be processed with GeneSippr and COWBAT pipelines :param analysistype: Current analysis type. Wi...
Create folders with relative symlinks to the desired simulated/sampled reads. These folders will contain all the reads created for each sample, and will be processed with GeneSippr and COWBAT pipelines :param analysistype: Current analysis type. Will either be 'simulated' or 'sampled'
def extend_to_data(self, data, **kwargs): """Build transition matrix from new data to the graph Creates a transition matrix such that `Y` can be approximated by a linear combination of landmarks. Any transformation of the landmarks can be trivially applied to `Y` by performing ...
Build transition matrix from new data to the graph Creates a transition matrix such that `Y` can be approximated by a linear combination of landmarks. Any transformation of the landmarks can be trivially applied to `Y` by performing `transform_Y = transitions.dot(transform)` ...
def parse_url(url): """ Parse the given url and update it with environment value if required. :param basestring url: :rtype: basestring :raise: KeyError if environment variable is needed but not found. """ # the url has to be a unicode by pystache's design, but the unicode concept has been rewa...
Parse the given url and update it with environment value if required. :param basestring url: :rtype: basestring :raise: KeyError if environment variable is needed but not found.
def detach(self): """ Detaches this volume from any device it may be attached to. If it is not attached, nothing happens. """ attachments = self.attachments if not attachments: # Not attached; no error needed, just return return # A volume ...
Detaches this volume from any device it may be attached to. If it is not attached, nothing happens.
def get_log_slice(db, job_id, start, stop): """ Get a slice of the calculation log as a JSON list of rows :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param start: start of the slice :param stop: end of the slice (the last ele...
Get a slice of the calculation log as a JSON list of rows :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param start: start of the slice :param stop: end of the slice (the last element is excluded)
def find_response_component(self, api_id=None, signature_id=None): ''' Find one or many response components. Args: api_id (str): Api id associated with the component(s) to be retrieved. signature_id (str): Signature id associated with the component(s)...
Find one or many response components. Args: api_id (str): Api id associated with the component(s) to be retrieved. signature_id (str): Signature id associated with the component(s) to be retrieved. Returns: A list of dictionaries ...
def set_service(self, name, service_config, project=False): """ Store a ServiceConfig in the keychain """ if not self.project_config.services or name not in self.project_config.services: self._raise_service_not_valid(name) self._validate_service(name, service_config) self._se...
Store a ServiceConfig in the keychain
def parse_data(self, sline): """This function builds the addRawResults dictionary using the header values of the labels section as sample Ids. """ if sline[0] == '': return 0 for idx, label in enumerate(self._labels_values[sline[0]]): if label != '': ...
This function builds the addRawResults dictionary using the header values of the labels section as sample Ids.
def branch_out(self, limb=None): ''' Set the individual section branches. This adds the various sections of the config file into the tree environment for access later. Optionally can specify a specific branch. This does not yet load them into the os environment. Parameters: ...
Set the individual section branches. This adds the various sections of the config file into the tree environment for access later. Optionally can specify a specific branch. This does not yet load them into the os environment. Parameters: limb (str/list): The ...
def serialize_streamnet(streamnet_file, output_reach_file): """Eliminate reach with zero length and return the reach ID map. Args: streamnet_file: original stream net ESRI shapefile output_reach_file: serialized stream net, ESRI shapefile Returns: id pairs {o...
Eliminate reach with zero length and return the reach ID map. Args: streamnet_file: original stream net ESRI shapefile output_reach_file: serialized stream net, ESRI shapefile Returns: id pairs {origin: newly assigned}
def import_from_txt( filename_or_fobj, encoding="utf-8", frame_style=FRAME_SENTINEL, *args, **kwargs ): """Return a rows.Table created from imported TXT file.""" # TODO: (maybe) # enable parsing of non-fixed-width-columns # with old algorithm - that would just split columns # at the vertical se...
Return a rows.Table created from imported TXT file.
def run_pointfinder(self): """ Run PointFinder on the FASTA sequences extracted from the raw reads """ logging.info('Running PointFinder on FASTA files') for i in range(len(self.runmetadata.samples)): # Start threads threads = Thread(target=self.pointfinde...
Run PointFinder on the FASTA sequences extracted from the raw reads
def from_cfunits(cls, units) -> 'Date': """Return a |Date| object representing the reference date of the given `units` string agreeing with the NetCDF-CF conventions. The following example string is taken from the `Time Coordinate`_ chapter of the NetCDF-CF conventions documentation (mo...
Return a |Date| object representing the reference date of the given `units` string agreeing with the NetCDF-CF conventions. The following example string is taken from the `Time Coordinate`_ chapter of the NetCDF-CF conventions documentation (modified). Note that the first entry (the uni...
def download_and_expand(self): """Download and expand RPM Python binding.""" top_dir_name = None if self.git_branch: # Download a source by git clone. top_dir_name = self._download_and_expand_by_git() else: # Download a source from the archive URL. ...
Download and expand RPM Python binding.
def rm_subtitles(path): """ delete all subtitles in path recursively """ sub_exts = ['ass', 'srt', 'sub'] count = 0 for root, dirs, files in os.walk(path): for f in files: _, ext = os.path.splitext(f) ext = ext[1:] if ext in sub_exts: p = o...
delete all subtitles in path recursively
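The walk in the row above is cut off mid-loop; a minimal sketch of the same recursive delete-by-extension idea (hypothetical name, returns a count of removed files):

import os

def remove_subtitles(path, exts=('ass', 'srt', 'sub')):
    # Walk the tree and unlink any file whose extension matches one of exts.
    count = 0
    for root, dirs, files in os.walk(path):
        for name in files:
            if os.path.splitext(name)[1].lstrip('.').lower() in exts:
                os.remove(os.path.join(root, name))
                count += 1
    return count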
def f_nc(self,x): ''' Wrapper of *f*: takes an input x with the size of the noncontext dimensions, expands it and evaluates the entire function. ''' x = np.atleast_2d(x) xx = self.context_manager._expand_vector(x) if x.shape[0] == 1: return self.f(xx)[0] ...
Wrapper of *f*: takes an input x with the size of the noncontext dimensions, expands it and evaluates the entire function.
def _convert_observ(self, observ): """Convert the observation to 32 bits. Args: observ: Numpy observation. Raises: ValueError: Observation contains infinite values. Returns: Numpy observation with 32-bit data type. """ if not np.isfinite(observ).all(): raise ValueError...
Convert the observation to 32 bits. Args: observ: Numpy observation. Raises: ValueError: Observation contains infinite values. Returns: Numpy observation with 32-bit data type.
def add_column(self, data, column_name="", inplace=False): """ Returns an SFrame with a new column. The number of elements in the data given must match the length of every other column of the SFrame. If no name is given, a default name is chosen. If inplace == False (default) th...
Returns an SFrame with a new column. The number of elements in the data given must match the length of every other column of the SFrame. If no name is given, a default name is chosen. If inplace == False (default) this operation does not modify the current SFrame, returning a new SFrame...
def _MergeOptional(self, a, b): """Tries to merge two values which may be None. If both values are not None, they are required to be the same and the merge is trivial. If one of the values is None and the other is not None, the merge results in the one which is not None. If both are None, the merge ...
Tries to merge two values which may be None. If both values are not None, they are required to be the same and the merge is trivial. If one of the values is None and the other is not None, the merge results in the one which is not None. If both are None, the merge results in None. Args: a: T...
def __Languages_comboBox_set_default_view_state(self): """ Sets the **Languages_comboBox** Widget default View state. """ if not self.__container.has_editor_tab(): return editor = self.__container.get_current_editor() index = self.Languages_comboBox.findText...
Sets the **Languages_comboBox** Widget default View state.
def make_fixed_temp_multi_apec(kTs, name_template='apec%d', norm=None): """Create a model summing multiple APEC components at fixed temperatures. *kTs* An iterable of temperatures for the components, in keV. *name_template* = 'apec%d' A template to use for the names of each component; it is str...
Create a model summing multiple APEC components at fixed temperatures. *kTs* An iterable of temperatures for the components, in keV. *name_template* = 'apec%d' A template to use for the names of each component; it is string-formatted with the 0-based component number as an argument. *norm...
def save_location(self, filename: str, location: PostLocation, mtime: datetime) -> None: """Save post location name and Google Maps link.""" filename += '_location.txt' location_string = (location.name + "\n" + "https://maps.google.com/maps?q={0},{1}&ll={0},{1}\n".form...
Save post location name and Google Maps link.
def whisper_filename(self): """Build a file path to the Whisper database""" source_name = self.source_id and self.source.name or '' return get_valid_filename("{0}__{1}.wsp".format(source_name, self.name))
Build a file path to the Whisper database
def _insert_manifest_item(configurator, key, item): """ Insert an item in the list of an existing manifest key """ with _open_manifest(configurator) as f: manifest = f.read() if item in ast.literal_eval(manifest).get(key, []): return pattern = """(["']{}["']:\\s*\\[)""".format(key) r...
Insert an item in the list of an existing manifest key
def recipe_create(backend, kitchen, name): """ Create a new Recipe """ err_str, use_kitchen = Backend.get_kitchen_from_user(kitchen) if use_kitchen is None: raise click.ClickException(err_str) click.secho("%s - Creating Recipe %s for Kitchen '%s'" % (get_datetime(), name, use_kitchen), f...
Create a new Recipe
def _uptrace(nodelist, node): ''' Traces the node upward. Starting from the current node, iterates over its ancestor nodes in order and yields them. The root node is excluded. ''' if node.parent_index is None: return parent = nodelist[node.parent_index] for x in _uptrace(nodelist, parent): yield x yield node
Traces the node upward. Starting from the current node, iterates over its ancestor nodes in order and yields them. The root node is excluded.
def parse_csv_headers(dataset_id): """Return the first row of a CSV as a list of headers.""" data = Dataset.objects.get(pk=dataset_id) with open(data.dataset_file.path, 'r') as datasetFile: csvReader = reader(datasetFile, delimiter=',', quotechar='"') headers = next(csvReader) # prin...
Return the first row of a CSV as a list of headers.
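Without the Django model lookup, the header read shown above boils down to this self-contained sketch (the path argument is a placeholder):

from csv import reader

def csv_headers(path):
    # Read only the first row of the CSV and return it as a list of header names.
    with open(path, 'r', newline='') as f:
        return next(reader(f, delimiter=',', quotechar='"'))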
def config_reader(self, config_level=None): """ :return: GitConfigParser allowing to read the full git configuration, but not to write it The configuration will include values from the system, user and repository configuration files. :param config_level: ...
:return: GitConfigParser allowing to read the full git configuration, but not to write it The configuration will include values from the system, user and repository configuration files. :param config_level: For possible values, see config_writer method ...
def to_dict(self): """Return a dict representation of this instance""" data = {} if self.created_at: data['created_at'] = self.created_at.strftime( '%Y-%m-%dT%H:%M:%S%z') if self.image_id: data['image_id'] = self.image_id if self.permalink...
Return a dict representation of this instance
def klm(p, q): """Compute the KLM divergence.""" p, q = flatten(p), flatten(q) return max(abs(p * np.nan_to_num(np.log(p / q))))
Compute the KLM divergence.
def parse_numpy_doc(doc): """ Extract the text from the various sections of a numpy-formatted docstring. Parameters ---------- doc: Union[str, None] Returns ------- OrderedDict[str, Union[None,str]] The extracted numpy-styled docstring sections.""" ...
Extract the text from the various sections of a numpy-formatted docstring. Parameters ---------- doc: Union[str, None] Returns ------- OrderedDict[str, Union[None,str]] The extracted numpy-styled docstring sections.
def permit_event(self, column=None, value=None, **kwargs): """ A permit event tracks the lifecycle of a permit from issuance to expiration. Examples include 'Application Received' and 'Permit Issued', etc. >>> PCS().permit_event('event_actual_date', '16-MAR-04') """ ...
A permit event tracks the lifecycle of a permit from issuance to expiration. Examples include 'Application Received' and 'Permit Issued', etc. >>> PCS().permit_event('event_actual_date', '16-MAR-04')
def _set_anchor(self, value): """ Subclasses may override this method. """ pX, pY = self.anchor x, y = value dX = x - pX dY = y - pY self.moveBy((dX, dY))
Subclasses may override this method.
def send_change_notification(hub, topic_url, updated_content=None): """7. Content Distribution""" if updated_content: body = base64.b64decode(updated_content['content']) else: body, updated_content = get_new_content(hub.config, topic_url) b64_body = updated_content['content'] heade...
7. Content Distribution
def _get_zset(self, name, operation, create=False): """ Get (and maybe create) a sorted set by name. """ return self._get_by_type(name, operation, create, b'zset', SortedSet(), return_default=False)
Get (and maybe create) a sorted set by name.
def execute_command(self): """ The web command runs the Scrapple web interface through a simple \ `Flask <http://flask.pocoo.org>`_ app. When the execute_command() method is called from the \ :ref:`runCLI() <implementation-cli>` function, it starts off two simultaneous \ ...
The web command runs the Scrapple web interface through a simple \ `Flask <http://flask.pocoo.org>`_ app. When the execute_command() method is called from the \ :ref:`runCLI() <implementation-cli>` function, it starts off two simultaneous \ processes : - Calls the run_flask() ...
def getUserInfo(self): """ Query the GET /hsapi/userInfo/ REST end point of the HydroShare server. :raises: HydroShareHTTPException to signal an HTTP error :return: A JSON object representing user info, for example: { "username": "username", "first_name...
Query the GET /hsapi/userInfo/ REST end point of the HydroShare server. :raises: HydroShareHTTPException to signal an HTTP error :return: A JSON object representing user info, for example: { "username": "username", "first_name": "First", "last_name": "Last"...
def getSharedSecretPassive( self, passive_exchange_data, allow_no_otpk = False, keep_otpk = False ): """ Do the key exchange, as the passive party. This involves retrieving data about the key exchange from the active party. :param passive_exchange_dat...
Do the key exchange, as the passive party. This involves retrieving data about the key exchange from the active party. :param passive_exchange_data: A structure generated by the active party, which contains data required to complete the key exchange. See the "to_other" part of t...
def create_repo(self, name, description=github.GithubObject.NotSet, homepage=github.GithubObject.NotSet, private=github.GithubObject.NotSet, has_issues=github.GithubObject.NotSet, has_wiki=github.GithubObject.NotSet, has_downloads=github.GithubObject.NotSet, h...
:calls: `POST /user/repos <http://developer.github.com/v3/repos>`_ :param name: string :param description: string :param homepage: string :param private: bool :param has_issues: bool :param has_wiki: bool :param has_downloads: bool :param has_projects: boo...
def gmres_mgs(A, b, x0=None, tol=1e-5, restrt=None, maxiter=None, xtype=None, M=None, callback=None, residuals=None, reorth=False): """Generalized Minimum Residual Method (GMRES) based on MGS. GMRES iteratively refines the initial solution guess to the system Ax = b Modified Gram-Schmidt ...
Generalized Minimum Residual Method (GMRES) based on MGS. GMRES iteratively refines the initial solution guess to the system Ax = b Modified Gram-Schmidt version Parameters ---------- A : array, matrix, sparse matrix, LinearOperator n x n, linear system to solve b : array, matrix ...
def history(self, assets, dts, field, is_perspective_after): """ A window of pricing data with adjustments applied assuming that the end of the window is the day before the current simulation time. Parameters ---------- assets : iterable of Assets The assets ...
A window of pricing data with adjustments applied assuming that the end of the window is the day before the current simulation time. Parameters ---------- assets : iterable of Assets The assets in the window. dts : iterable of datetime64-like The datetime...
def get_namespace_by_url(self, url: str) -> Optional[Namespace]: """Look up a namespace by url.""" return self.session.query(Namespace).filter(Namespace.url == url).one_or_none()
Look up a namespace by url.
def lookup(self, section, name): """Lookup config value.""" value = os.environ.get('AMAAS_{}'.format(name.upper())) if value: return value try: value = self.file_config.get(section, name) except ConfigParserError: pass else: ...
Lookup config value.
def _process_state_in_progress(self, job_record): """ method that takes care of processing job records in STATE_IN_PROGRESS state""" start_timeperiod = self.compute_start_timeperiod(job_record.process_name, job_record.timeperiod) end_timeperiod = self.compute_end_timeperiod(job_record.process_na...
method that takes care of processing job records in STATE_IN_PROGRESS state
def create_resource(cls, request_json): r""" Used to create a node in the database of type 'cls' in response to a POST request. create_resource should only \ be invoked on a resource when the client specifies a POST request. :param request_json: a dictionary formatted according to the s...
r""" Used to create a node in the database of type 'cls' in response to a POST request. create_resource should only \ be invoked on a resource when the client specifies a POST request. :param request_json: a dictionary formatted according to the specification at \ http://jsonapi.org/for...
def do_execute(self): """ The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str """ if isinstance(self.input.payload, Instances): inst = None data = self.input.payload else: inst = ...
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str