Dataset schema: two string columns —
`code` (lengths 51 to 2.38k characters) and
`docstring` (lengths 4 to 15.2k characters).
def primary_measures(self):
    """Yield the primary measure columns: columns with no parent whose role is MEASURE."""
    from ambry.valuetype.core import ROLE

    for column in self.columns:
        if column.parent:
            continue
        if column.role == ROLE.MEASURE:
            yield column
Iterate over the primary measure columns — columns which do not have a parent. Also sets the property partition_stats to the stats collection for the partition and column.
def add_linked_station(self, datfile, station, location=None): if datfile not in self.fixed_stations: self.fixed_stations[datfile] = {station: location} else: self.fixed_stations[datfile][station] = location if location and not self.base_location: self._utm_zone = location.zone self._utm_datum = location.datum self._utm_convergence = location.convergence
Add a linked or fixed station
def history_backward(self, count=1):
    """Move backwards through history to the ``count``-th previous matching entry.

    If at least one match is found, the cursor is moved to the end of the text.
    """
    self._set_history_search()
    found = False
    for index in range(self.working_index - 1, -1, -1):
        if not self._history_matches(index):
            continue
        self.working_index = index
        count -= 1
        found = True
        if count == 0:
            break
    if found:
        self.cursor_position = len(self.text)
Move backwards through history.
def downcast(self, dtypes=None): if dtypes is False: return self values = self.values if self._is_single_block: if dtypes is None: dtypes = 'infer' nv = maybe_downcast_to_dtype(values, dtypes) return self.make_block(nv) if dtypes is None: return self if not (dtypes == 'infer' or isinstance(dtypes, dict)): raise ValueError("downcast must have a dictionary or 'infer' as " "its argument") def f(m, v, i): if dtypes == 'infer': dtype = 'infer' else: raise AssertionError("dtypes as dict is not supported yet") if dtype is not None: v = maybe_downcast_to_dtype(v, dtype) return v return self.split_and_operate(None, f, False)
try to downcast each item to the dict of dtypes if present
def send_confirmation_email(self): form = self._get_form('SECURITY_SEND_CONFIRMATION_FORM') if form.validate_on_submit(): self.security_service.send_email_confirmation_instructions(form.user) self.flash(_('flask_unchained.bundles.security:flash.confirmation_request', email=form.user.email), category='info') if request.is_json: return '', HTTPStatus.NO_CONTENT elif form.errors and request.is_json: return self.errors(form.errors) return self.render('send_confirmation_email', send_confirmation_form=form, **self.security.run_ctx_processor('send_confirmation_email'))
View function which sends confirmation token and instructions to a user.
def expand_file_arguments():
    """Replace any ``@file`` argument in ``sys.argv`` with the values read from that file.

    File contents may be split by newline or by space; values are added as-is,
    as if specified in that order on the command line. When any expansion
    occurred, the resulting command line is echoed and ``sys.argv`` is replaced.
    """
    result = []
    did_expand = False
    for argument in sys.argv:
        if not argument.startswith("@"):
            result.append(argument)
            continue
        did_expand = True
        with open(argument[1:], "r") as handle:
            for text_line in handle.readlines():
                result.extend(shlex.split(text_line))
    if did_expand:
        print("esptool.py %s" % (" ".join(result[1:])))
        sys.argv = result
Any argument starting with "@" gets replaced with all values read from a text file. Text file arguments can be split by newline or by space. Values are added "as-is", as if they were specified in this order on the command line.
def create_backup(name): r if name in list_backups(): raise CommandExecutionError('Backup already present: {0}'.format(name)) ps_cmd = ['Backup-WebConfiguration', '-Name', "'{0}'".format(name)] cmd_ret = _srvmgr(ps_cmd) if cmd_ret['retcode'] != 0: msg = 'Unable to backup web configuration: {0}\nError: {1}' \ ''.format(name, cmd_ret['stderr']) raise CommandExecutionError(msg) return name in list_backups()
r''' Backup an IIS Configuration on the System. .. versionadded:: 2017.7.0 .. note:: Backups are stored in the ``$env:Windir\System32\inetsrv\backup`` folder. Args: name (str): The name to give the backup Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' win_iis.create_backup good_config_20170209
def _write_bed_header(self):
    """Write the first 3 bytes of the BED file: the magic pair (0x6C, 0x1B)
    followed by the mode byte — 1 for SNP-major files, 0 otherwise."""
    mode_byte = 1 if self._bed_format == "SNP-major" else 0
    self._bed.write(bytearray((108, 27, mode_byte)))
Writes the BED first 3 bytes.
def range_piles(ranges): endpoints = _make_endpoints(ranges) for seqid, ends in groupby(endpoints, lambda x: x[0]): active = [] depth = 0 for seqid, pos, leftright, i, score in ends: if leftright == LEFT: active.append(i) depth += 1 else: depth -= 1 if depth == 0 and active: yield active active = []
Return piles of intervals that overlap. The piles are only interrupted by regions of zero coverage. >>> ranges = [Range("2", 0, 1, 3, 0), Range("2", 1, 4, 3, 1), Range("3", 5, 7, 3, 2)] >>> list(range_piles(ranges)) [[0, 1], [2]]
def predict(self, X):
    """Return predictions for the given input test cases by running the
    cost/forward computation with the unrolled model parameters."""
    unrolled_thetas = self.__unroll(self.__thetas)
    return self.__cost(unrolled_thetas, 0, np.matrix(X))
Returns predictions of input test cases.
def to_json(self, *, indent=None, sort_keys=False):
    """Get the object's JSON representation.

    Parameters
    ----------
    indent: :class:`int`, optional
        Number of spaces used as indentation; ``None`` returns the shortest
        possible string.
    sort_keys: :class:`bool`, optional
        Whether keys should be sorted alphabetically or preserve the order
        defined by the object.

    Returns
    -------
    :class:`str`
        JSON representation of the object.
    """
    payload = {key: value for key, value in dict(self).items() if value is not None}
    return json.dumps(payload, indent=indent, sort_keys=sort_keys,
                      default=self._try_dict)
Gets the object's JSON representation. Parameters ---------- indent: :class:`int`, optional Number of spaces used as indentation, ``None`` will return the shortest possible string. sort_keys: :class:`bool`, optional Whether keys should be sorted alphabetically or preserve the order defined by the object. Returns ------- :class:`str` JSON representation of the object.
def data(self, index, role): if not index.isValid() or \ index.row() >= self._shape[0] or \ index.column() >= self._shape[1]: return None row, col = ((index.row(), index.column()) if self.axis == 0 else (index.column(), index.row())) if role != Qt.DisplayRole: return None if self.axis == 0 and self._shape[0] <= 1: return None header = self.model.header(self.axis, col, row) if not is_type_text_string(header): header = to_text_string(header) return header
Get the data for the header. This is used when a header has levels.
def reset_server_and_request_check(self, address): with self._lock: self._reset_server(address) self._request_check(address)
Clear our pool for a server, mark it Unknown, and check it soon.
def superuser_api_key_required(f): @functools.wraps(f) def wrapped(*args, **kwargs): api_key = current_api_key() g.api_key = api_key utils.jsonify_assert( api_key.superuser, 'API key=%r must be a super user' % api_key.id, 403) return f(*args, **kwargs) return wrapped
Decorator ensures only superuser API keys can request this function.
def flow_pipemajor(Diam, HeadLossFric, Length, Nu, PipeRough): FlowHagen = flow_hagen(Diam, HeadLossFric, Length, Nu).magnitude if FlowHagen < flow_transition(Diam, Nu).magnitude: return FlowHagen else: return flow_swamee(Diam, HeadLossFric, Length, Nu, PipeRough).magnitude
Return the flow rate with only major losses. This function applies to both laminar and turbulent flows.
async def create_turn_endpoint(protocol_factory, server_addr, username, password, lifetime=600, ssl=False, transport='udp'): loop = asyncio.get_event_loop() if transport == 'tcp': _, inner_protocol = await loop.create_connection( lambda: TurnClientTcpProtocol(server_addr, username=username, password=password, lifetime=lifetime), host=server_addr[0], port=server_addr[1], ssl=ssl) else: _, inner_protocol = await loop.create_datagram_endpoint( lambda: TurnClientUdpProtocol(server_addr, username=username, password=password, lifetime=lifetime), remote_addr=server_addr) protocol = protocol_factory() transport = TurnTransport(protocol, inner_protocol) await transport._connect() return transport, protocol
Create datagram connection relayed over TURN.
def get_snmp_information(self): snmp_information = {} snmp_config = junos_views.junos_snmp_config_table(self.device) snmp_config.get() snmp_items = snmp_config.items() if not snmp_items: return snmp_information snmp_information = { py23_compat.text_type(ele[0]): ele[1] if ele[1] else "" for ele in snmp_items[0][1] } snmp_information["community"] = {} communities_table = snmp_information.pop("communities_table") if not communities_table: return snmp_information for community in communities_table.items(): community_name = py23_compat.text_type(community[0]) community_details = {"acl": ""} community_details.update( { py23_compat.text_type(ele[0]): py23_compat.text_type( ele[1] if ele[0] != "mode" else C.SNMP_AUTHORIZATION_MODE_MAP.get(ele[1]) ) for ele in community[1] } ) snmp_information["community"][community_name] = community_details return snmp_information
Return the SNMP configuration.
def classify_format(f):
    """Determine the format of a word embedding file from its content.

    Only the first two lines are examined; the sanity of the rest of the
    input file is not checked.

    Args:
        f (Filelike): embedding file to classify.

    Returns:
        class: one of the loader classes.

    Raises:
        OSError: if no format validator accepts the first two lines.
    """
    l0, l1 = _get_two_lines(f)
    if loader.glove.check_valid(l0, l1):
        return _glove
    if loader.word2vec_text.check_valid(l0, l1):
        return _word2vec_text
    if loader.word2vec_bin.check_valid(l0, l1):
        return _word2vec_bin
    # Bug fix: the message was a bytes literal, which renders as
    # "b'Invalid format'" in the traceback; use a str instead.
    raise OSError("Invalid format")
Determine the format of word embedding file by their content. This operation only looks at the first two lines and does not check the sanity of input file. Args: f (Filelike): Returns: class
def get_data(self, request=None): if request is None: raise ValueError data = [[] for _ in self.sources] for i in range(request): try: for source_data, example in zip( data, next(self.child_epoch_iterator)): source_data.append(example) except StopIteration: if not self.strictness and data[0]: break elif self.strictness > 1 and data[0]: raise ValueError raise return tuple(numpy.asarray(source_data) for source_data in data)
Get data from the dataset.
def safe_urlencode(params, doseq=0): if IS_PY3: return urlencode(params, doseq) if hasattr(params, "items"): params = params.items() new_params = [] for k, v in params: k = k.encode("utf-8") if isinstance(v, (list, tuple)): new_params.append((k, [force_bytes(i) for i in v])) else: new_params.append((k, force_bytes(v))) return urlencode(new_params, doseq)
UTF-8-safe version of urlencode. The stdlib urlencode prior to Python 3.x chokes on UTF-8 values which can't fall back to ASCII.
def copy(self):
    """Return a shallow copy of this _TimeAnchor."""
    return _TimeAnchor(
        self.reading_id,
        self.uptime,
        self.utc,
        self.is_break,
        self.exact,
    )
Return a copy of this _TimeAnchor.
def load_texture(self, file_path): self.image = pygame.image.load(file_path) self.apply_texture(self.image)
Generate our sprite's surface by loading the specified image from disk. Note that this automatically centers the origin.
def register_updates(self, callback): _LOGGER.debug("Registered callback for state: %s", self._stateName) self._observer_callbacks.append(callback)
Register a callback to notify a listener of state changes.
def post_migrate(cls, sender=None, **kwargs): ContentType = apps.get_model('contenttypes', 'ContentType') for model_name, proxy_model in sender.get_proxy_models().items(): ctype, created = ContentType.objects.get_or_create(app_label=sender.label, model=model_name) if created: sender.grant_permissions(proxy_model)
Iterate over fake_proxy_models and add contenttypes and permissions for missing proxy models, if this has not been done by Django yet
def bind_field( self, form: DynamicForm, unbound_field: UnboundField, options: Dict[Any, Any], ) -> Field: filters = unbound_field.kwargs.get('filters', []) filters.append(lambda x: x.strip() if isinstance(x, str) else x) return unbound_field.bind(form=form, filters=filters, **options)
Customize how fields are bound by stripping all whitespace. :param form: The form :param unbound_field: The unbound field :param options: The field options :returns: The bound field
def validate_default_element(self, value): if isinstance(value, (six.string_types, six.integer_types)): if self.__type: self.__type(value) return value return super(EnumField, self).validate_default_element(value)
Validate default element of Enum field. Enum fields allow for delayed resolution of default values when the type of the field has not been resolved. The default value of a field may be a string or an integer. If the Enum type of the field has been resolved, the default value is validated against that type. Args: value: Value to validate. Raises: ValidationError if value is not expected message type.
def get_example_features(example): return (example.features.feature if isinstance(example, tf.train.Example) else example.context.feature)
Returns the non-sequence features from the provided example.
def clean_highlight(self):
    """Remove empty highlight entries from every hit in the cached results.

    No-op when the result set is not valid.
    """
    if not self.valid:
        return
    for hit in self._results['hits']['hits']:
        if 'highlight' not in hit:
            continue
        highlight = hit['highlight']
        empty_keys = [key for key, value in highlight.items() if not value]
        for key in empty_keys:
            del highlight[key]
Remove the empty highlight
def machines(self): if self._resources is None: self.__init() if "machines" in self._resources: url = self._url + "/machines" return _machines.Machines(url, securityHandler=self._securityHandler, initialize=False, proxy_url=self._proxy_url, proxy_port=self._proxy_port) else: return None
gets a reference to the machines object
def evaluate_extracted_tokens(gold_content, extr_content):
    """Evaluate the similarity between gold-standard and extracted content,
    typically for a single HTML document, as another way of evaluating the
    performance of an extractor model.

    Args:
        gold_content (str or Sequence[str]): Gold-standard content, either as
            a string or as an already-tokenized list of tokens.
        extr_content (str or Sequence[str]): Extracted content, either as a
            string or as an already-tokenized list of tokens.

    Returns:
        Dict[str, float]
    """
    if isinstance(gold_content, string_):
        gold_content = simple_tokenizer(gold_content)
    if isinstance(extr_content, string_):
        extr_content = simple_tokenizer(extr_content)
    gold_set = set(gold_content)
    extr_set = set(extr_content)
    union_size = len(gold_set | extr_set)
    # Bug fix: when both token sequences are empty the union is empty and the
    # division raised ZeroDivisionError; two empty documents are identical.
    jaccard = (len(gold_set & extr_set) / union_size) if union_size else 1.0
    levenshtein = dameraulevenshtein(gold_content, extr_content)
    return {'jaccard': jaccard, 'levenshtein': levenshtein}
Evaluate the similarity between gold-standard and extracted content, typically for a single HTML document, as another way of evaluating the performance of an extractor model. Args: gold_content (str or Sequence[str]): Gold-standard content, either as a string or as an already-tokenized list of tokens. extr_content (str or Sequence[str]): Extracted content, either as a string or as an already-tokenized list of tokens. Returns: Dict[str, float]
def _start_console(self): self._remote_pipe = yield from asyncio_open_serial(self._get_pipe_name()) server = AsyncioTelnetServer(reader=self._remote_pipe, writer=self._remote_pipe, binary=True, echo=True) self._telnet_server = yield from asyncio.start_server(server.run, self._manager.port_manager.console_host, self.console)
Starts remote console support for this VM.
def find_npolfile(flist,detector,filters): npolfile = None for f in flist: fdet = fits.getval(f, 'detector', memmap=False) if fdet == detector: filt1 = fits.getval(f, 'filter1', memmap=False) filt2 = fits.getval(f, 'filter2', memmap=False) fdate = fits.getval(f, 'date', memmap=False) if filt1 == 'ANY' or \ (filt1 == filters[0] and filt2 == filters[1]): npolfile = f return npolfile
Search a list of files for one that matches the configuration of detector and filters used.
def neurite_volume_density(neurites, neurite_type=NeuriteType.all): def vol_density(neurite): return neurite.volume / convex_hull(neurite).volume return list(vol_density(n) for n in iter_neurites(neurites, filt=is_type(neurite_type)))
Get the volume density per neurite The volume density is defined as the ratio of the neurite volume and the volume of the neurite's enclosing convex hull
def parse_items(self, field: Field) -> Mapping[str, Any]: return self.build_parameter(field.container)
Parse the child item type for list fields, if any.
def wait_for_next_completion(self, runtime_context):
    """Block until a job finishes (via the workflow eval lock, when present),
    then re-raise the first recorded exception, if any."""
    lock = runtime_context.workflow_eval_lock
    if lock is not None:
        lock.wait()
    if self.exceptions:
        raise self.exceptions[0]
Wait for jobs to finish.
def make_db_data_fetcher(postgresql_conn_info, template_path, reload_templates, query_cfg, io_pool): sources = parse_source_data(query_cfg) queries_generator = make_queries_generator( sources, template_path, reload_templates) return DataFetcher( postgresql_conn_info, queries_generator, io_pool)
Returns an object which is callable with the zoom and unpadded bounds and which returns a list of rows.
def convert_content(self, fpath: str) -> typing.Optional[dict]: try: loader = self.loader_cls(fpath) except UnsupportedExtensionError: return return loader.convert_content()
Convert content of source file with loader, provided with `loader_cls` self attribute. Returns dict with converted content if loader class support source file extenstions, otherwise return nothing.
def as_json(self): self._config['applyCss'] = self.applyCss self._json['config'] = self._config return self._json
Represent effect as JSON dict.
def to_bayesian_model(self): from pgmpy.models import BayesianModel bm = BayesianModel() var_clique_dict = defaultdict(tuple) var_order = [] junction_tree = self.to_junction_tree() root_node = next(iter(junction_tree.nodes())) bfs_edges = nx.bfs_edges(junction_tree, root_node) for node in root_node: var_clique_dict[node] = root_node var_order.append(node) for edge in bfs_edges: clique_node = edge[1] for node in clique_node: if not var_clique_dict[node]: var_clique_dict[node] = clique_node var_order.append(node) for node_index in range(len(var_order)): node = var_order[node_index] node_parents = (set(var_clique_dict[node]) - set([node])).intersection( set(var_order[:node_index])) bm.add_edges_from([(parent, node) for parent in node_parents]) return bm
Creates a Bayesian Model which is a minimum I-Map for this markov model. The ordering of parents may not remain constant. It would depend on the ordering of variable in the junction tree (which is not constant) all the time. Examples -------- >>> from pgmpy.models import MarkovModel >>> from pgmpy.factors.discrete import DiscreteFactor >>> mm = MarkovModel() >>> mm.add_nodes_from(['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7']) >>> mm.add_edges_from([('x1', 'x3'), ('x1', 'x4'), ('x2', 'x4'), ... ('x2', 'x5'), ('x3', 'x6'), ('x4', 'x6'), ... ('x4', 'x7'), ('x5', 'x7')]) >>> phi = [DiscreteFactor(edge, [2, 2], np.random.rand(4)) for edge in mm.edges()] >>> mm.add_factors(*phi) >>> bm = mm.to_bayesian_model()
def _get_states_from_samecodes(self, geocodes):
    """Return all states for a given list of SAME codes.

    *Shouldn't be used to determine feed scope, please use getfeedscope()*

    :param geocodes: list of SAME code strings
    :raises Exception: if geocodes is not a list, or a code is not found
    """
    # Bug fix: validate the argument type once, up front — the original
    # performed this check inside the loop, so a non-list iterable (e.g. a
    # string) was partially iterated before the error was raised, and an
    # empty non-list argument was never rejected at all.
    if not isinstance(geocodes, list):
        raise Exception("specified geocodes must be list")
    states = []
    for code in geocodes:
        try:
            state = self.samecodes[code]['state']
        except KeyError:
            raise Exception("Samecode Not Found")
        if state not in states:
            states.append(state)
    return states
Returns all states for a given list of SAME codes *Shouldn't be used to determine feed scope, please use getfeedscope()*
def gmeta_pop(gmeta, info=False):
    """Remove GMeta wrapping from a Globus Search result.

    Arguments:
        gmeta (dict, str, or GlobusHTTPResponse): The Globus Search result
            to unwrap.
        info (bool): If ``False``, return only the list of results. If
            ``True``, return a tuple of the results list and a dict of query
            information. **Default**: ``False``.

    Returns:
        list (if ``info=False``): The unwrapped results.
        tuple (if ``info=True``): The unwrapped results and query info.

    Raises:
        TypeError: if gmeta is none of the accepted types.
    """
    # Check dict first so plain dicts never touch the SDK class; use
    # isinstance() rather than `type(...) is` so subclasses are accepted.
    if isinstance(gmeta, dict):
        pass
    elif isinstance(gmeta, GlobusHTTPResponse):
        gmeta = json.loads(gmeta.text)
    elif isinstance(gmeta, str):
        gmeta = json.loads(gmeta)
    else:
        raise TypeError("gmeta must be dict, GlobusHTTPResponse, or JSON string")
    results = []
    for res in gmeta["gmeta"]:
        results.extend(res["content"])
    if info:
        fyi = {"total_query_matches": gmeta.get("total")}
        return results, fyi
    return results
Remove GMeta wrapping from a Globus Search result. This function can be called on the raw GlobusHTTPResponse that Search returns, or a string or dictionary representation of it. Arguments: gmeta (dict, str, or GlobusHTTPResponse): The Globus Search result to unwrap. info (bool): If ``False``, will return a list of the results and discard the metadata. If ``True``, will return a tuple containing the results list, and other information about the query. **Default**: ``False``. Returns: list (if ``info=False``): The unwrapped results. tuple (if ``info=True``): The unwrapped results, and a dictionary of query information.
def packtar(tarfile, files, srcdir):
    """Pack the given files into a gzipped tar, running tar with cwd=srcdir.

    :param tarfile: destination archive path (normalized via cygpath)
    :param files: list of file names relative to srcdir
    :param srcdir: directory to pack from
    :raises: re-raises any failure from the tar subprocess after logging it
    """
    tarfile = cygpath(os.path.abspath(tarfile))
    log.debug("pack tar %s from folder %s with files ", tarfile, srcdir)
    log.debug(files)
    # Bug fix: the original opened os.devnull manually and leaked the file
    # descriptor when check_call raised; the context manager guarantees it
    # is closed on every path.
    with open(os.devnull, "w") as nullfd:
        try:
            check_call([TAR, '-czf', tarfile] + files, cwd=srcdir,
                       stdout=nullfd, preexec_fn=_noumask)
        except Exception:
            log.exception("Error packing tar file %s to %s", tarfile, srcdir)
            raise
Pack the given files into a tar, setting cwd = srcdir
def build(self, root="runs"): for d, control in self.iter(root): _mkdirs(d) with open(os.path.join(d, self.control_name), 'w') as fp: json.dump(control, fp, indent=self.indent) fp.write('\n')
Build a nested directory structure, starting in ``root`` :param root: Root directory for structure
def waypoint_current(self): if self.mavlink10(): m = self.recv_match(type='MISSION_CURRENT', blocking=True) else: m = self.recv_match(type='WAYPOINT_CURRENT', blocking=True) return m.seq
return current waypoint
def as_square_array(arr):
    """Return ``arr`` massaged into a 2-D square numpy array.

    Raises:
        ValueError: if ``arr`` cannot be viewed as a square 2-D array.
    """
    arr = np.atleast_2d(arr)
    if arr.ndim != 2 or arr.shape[0] != arr.shape[1]:
        raise ValueError("Expected square array")
    return arr
Return arr massaged into a square array. Raises ValueError if arr cannot be so massaged.
def new_genre(self, program, genre, relevance): if self.__v_genre: print("[Genre: %s, %s, %s]" % (program, genre, relevance))
Callback run for each new program genre entry
def clone(name, repository, destination, debug=False): msg = ' - cloning {} to {}'.format(name, destination) util.print_info(msg) cmd = sh.git.bake('clone', repository, destination) util.run_command(cmd, debug=debug)
Clone the specified repository into a temporary directory and return None. :param name: A string containing the name of the repository being cloned. :param repository: A string containing the repository to clone. :param destination: A string containing the directory to clone the repository into. :param debug: An optional bool to toggle debug output. :return: None
def update_floatingip_statuses(self, context, router_id, fip_statuses): cctxt = self.client.prepare(version='1.1') return cctxt.call(context, 'update_floatingip_statuses_cfg', router_id=router_id, fip_statuses=fip_statuses)
Make a remote process call to update operational status for one or several floating IPs. @param context: contains user information @param router_id: id of router associated with the floatingips @param fip_statuses: dict with floatingip_id as key and status as value
def transform_sequence(f): @wraps(f) def wrapper(*args, **kwargs): return lambda seq: seq.map_points(partial(f, *args, **kwargs)) return wrapper
A decorator to take a function operating on a point and turn it into a function returning a callable operating on a sequence. The functions passed to this decorator must define a kwarg called "point", or have point be the last positional argument
def _adapt_response(self, response): errors, meta = super(ServerError, self)._adapt_response(response) return errors[0], meta
Convert various error responses to standardized ErrorDetails.
def plot_discrete(self, show=False, annotations=True): import matplotlib.pyplot as plt axis = plt.axes() axis.set_aspect('equal', 'datalim') for i, points in enumerate(self.discrete): color = ['g', 'k'][i in self.root] axis.plot(*points.T, color=color) if annotations: for e in self.entities: if not hasattr(e, 'plot'): continue e.plot(self.vertices) if show: plt.show() return axis
Plot the closed curves of the path.
def default_tool_argparser(description, example_parameters): import argparse epilog = '\n' for k, v in sorted(example_parameters.items()): epilog += ' ' + k + '\n' p = argparse.ArgumentParser( description=description, add_help=False, formatter_class=argparse.RawDescriptionHelpFormatter, epilog=('available values for examples (exkey):'+epilog)) return p
Create default parser for single tools.
def fap_simple(Z, fmax, t, y, dy, normalization='standard'): N = len(t) T = max(t) - min(t) N_eff = fmax * T p_s = cdf_single(Z, N, normalization=normalization) return 1 - p_s ** N_eff
False Alarm Probability based on estimated number of indep frequencies
def backlink(node):
    """Given a CFG with outgoing links (``next``), create the incoming links
    by filling each reachable node's ``prev`` set."""
    visited = set()
    pending = [node]
    while pending:
        current = pending.pop()
        visited.add(current)
        for successor in current.next:
            successor.prev.add(current)
            if successor not in visited:
                pending.append(successor)
Given a CFG with outgoing links, create incoming links.
def in_period(period, dt=None): if dt is None: dt = datetime.now() period = re.sub(r"^\s*|\s*$", '', period) period = re.sub(r"\s*(?={|$)", '', period) period = re.sub(r",\s*", ',', period) period = re.sub(r"\s*-\s*", '-', period) period = re.sub(r"{\s*", '{', period) period = re.sub(r"\s*}\s*", '}', period) period = re.sub(r"}(?=[^,])", '}|', period) period = period.lower() if period == '': return True sub_periods = re.split(',', period) for sp in sub_periods: if _is_in_sub_period(sp, dt): return True return False
Determines if a datetime is within a certain time period. If the time is omitted the current time will be used. in_period return True is the datetime is within the time period, False if not. If the expression is malformed a TimePeriod.InvalidFormat exception will be raised. (Note that this differs from Time::Period, which returns -1 if the expression is invalid). The format for the time period is like Perl's Time::Period module, which is documented in some detail here: http://search.cpan.org/~pryan/Period-1.20/Period.pm Here's the quick and dirty version. Each period is composed of one or more sub-period seperated by a comma. A datetime must match at least one of the sub periods to be considered in that time period. Each sub-period is composed of one or more tests, like so: scale {value} scale {a-b} scale {a b c} The datetime must pass each test for a sub-period for the sub-period to be considered true. For example: Match Mondays wd {mon} Match Monday mornings wd {mon} hr {9-16} Match Monday morning or Friday afternoon wd {mon} hr {0-12}, wd {fri} hr {0-12} Valid scales are: year month week yday mday wday hour minute second Those can be substituted with their corresponding code: yd mo wk yd md wd hr min sec
def sent2examples(self, sent):
    """Convert a sentence into (word, feature-vector) pairs.

    Words missing from the embeddings are replaced by the UNK token before
    ngram extraction; each yielded vector is the flattened concatenation of
    the ngram window's embeddings, optionally with a bias term appended.
    """
    words = [w if w in self.embeddings else TaggerBase.UNK for w in sent]
    ngrams = TaggerBase.ngrams(words, self.context, self.transfer)
    # Cleanup: removed the dead `fvs = []` accumulator — this is a generator
    # and nothing was ever appended to that list.
    for word, ngram in zip(sent, ngrams):
        fv = np.array([self.embeddings.get(w, self.embeddings.zero_vector())
                       for w in ngram]).flatten()
        if self.add_bias:
            fv = np.hstack((fv, np.array(1)))
        yield word, fv
Convert ngrams into feature vectors.
def set_field(self, state, field_name, field_type, value): field_ref = SimSootValue_InstanceFieldRef.get_ref(state=state, obj_alloc_id=self.heap_alloc_id, field_class_name=self.type, field_name=field_name, field_type=field_type) state.memory.store(field_ref, value)
Sets an instance field.
def create_db(with_postgis=False): local_machine() local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format( USER_AND_HOST, env.db_role, DB_PASSWORD)) local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format( USER_AND_HOST, env.db_name)) if with_postgis: local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format( USER_AND_HOST, env.db_name)) local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}' ' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role)) local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES' ' IN SCHEMA public TO {1}"'.format( USER_AND_HOST, env.db_role))
Creates the local database. :param with_postgis: If ``True``, the postgis extension will be installed.
def create_smooth_contour( shakemap_layer, output_file_path='', active_band=1, smoothing_method=NUMPY_SMOOTHING, smoothing_sigma=0.9): timestamp = datetime.now() temp_smoothed_shakemap_path = unique_filename( prefix='temp-shake-map' + timestamp.strftime('%Y%m%d-%H%M%S'), suffix='.tif', dir=temp_dir('temp')) temp_smoothed_shakemap_path = smooth_shakemap( shakemap_layer.source(), output_file_path=temp_smoothed_shakemap_path, active_band=active_band, smoothing_method=smoothing_method, smoothing_sigma=smoothing_sigma ) return shakemap_contour( temp_smoothed_shakemap_path, output_file_path=output_file_path, active_band=active_band )
Create contour from a shake map layer by using smoothing method. :param shakemap_layer: The shake map raster layer. :type shakemap_layer: QgsRasterLayer :param active_band: The band which the data located, default to 1. :type active_band: int :param smoothing_method: The smoothing method that wanted to be used. :type smoothing_method: NONE_SMOOTHING, NUMPY_SMOOTHING, SCIPY_SMOOTHING :param smooth_sigma: parameter for gaussian filter used in smoothing function. :type smooth_sigma: float :returns: The contour of the shake map layer path. :rtype: basestring
def break_type_id(self, break_type_id): if break_type_id is None: raise ValueError("Invalid value for `break_type_id`, must not be `None`") if len(break_type_id) < 1: raise ValueError("Invalid value for `break_type_id`, length must be greater than or equal to `1`") self._break_type_id = break_type_id
Sets the break_type_id of this ModelBreak. The `BreakType` this `Break` was templated on. :param break_type_id: The break_type_id of this ModelBreak. :type: str
def between(start, delta, end=None):
    """Yield values from ``start``, stepping by ``delta``, until ``end`` (exclusive).

    If ``end`` is None the iterator never terminates.

    Args:
        start: The value to start at.
        delta: The interval to step by.
        end: (Optional) value to stop before.

    Yields:
        successive values of ``start + k * delta``.
    """
    current = start
    while True:
        if end is not None and current >= end:
            return
        yield current
        current = current + delta
Return an iterator between this date till given end point. Example usage: >>> d = datetime_tz.smartparse("5 days ago") 2008/05/12 11:45 >>> for i in d.between(timedelta(days=1), datetime_tz.now()): >>> print i 2008/05/12 11:45 2008/05/13 11:45 2008/05/14 11:45 2008/05/15 11:45 2008/05/16 11:45 Args: start: The date to start at. delta: The interval to iterate with. end: (Optional) Date to end at. If not given the iterator will never terminate. Yields: datetime_tz objects.
def _active_case(self, value: ObjectValue) -> Optional["CaseNode"]: for c in self.children: for cc in c.data_children(): if cc.iname() in value: return c
Return receiver's case that's active in an instance node value.
def get_status(self, status_value, message=None): status = etree.Element('Status') status_code = etree.SubElement(status, 'StatusCode') status_code.set('Value', 'samlp:' + status_value) if message: status_message = etree.SubElement(status, 'StatusMessage') status_message.text = message return status
Build a Status XML block for a SAML 1.1 Response.
def get_klass_children(gi_name): res = {} children = __HIERARCHY_GRAPH.successors(gi_name) for gi_name in children: ctype_name = ALL_GI_TYPES[gi_name] qs = QualifiedSymbol(type_tokens=[Link(None, ctype_name, ctype_name)]) qs.add_extension_attribute ('gi-extension', 'type_desc', SymbolTypeDesc([], gi_name, ctype_name, 0)) res[ctype_name] = qs return res
Returns a dict of qualified symbols representing the children of the klass-like symbol named gi_name
def random_population(dna_size, pop_size, tune_params):
    """Create a random population of ``pop_size`` DNA strings, each with
    ``dna_size`` genes drawn by ``random_val`` from ``tune_params``."""
    return [
        [random_val(gene_index, tune_params) for gene_index in range(dna_size)]
        for _ in range(pop_size)
    ]
create a random population
def stop(self): if self._stack: try: self._stack.teardown() except Exception: self.fatal(sys.exc_info()) super().stop()
Cleanup the context, after the loop ended.
def cmd_gimbal_roi(self, args): latlon = None try: latlon = self.module('map').click_position except Exception: print("No map available") return if latlon is None: print("No map click position available") return self.master.mav.mount_control_send(self.target_system, self.target_component, latlon[0]*1e7, latlon[1]*1e7, 0, 0)
control roi position
def _wrap_parse(code, filename):
    """Parse ``code`` inside a synthetic ``async def`` wrapper so that bare
    ``await`` expressions do not raise SyntaxError; return the AST of the
    wrapper's first expression value."""
    wrapped = 'async def wrapper():\n' + indent(code, ' ')
    module = ast.parse(wrapped, filename=filename)
    return module.body[0].body[0].value
async wrapper is required to avoid await calls raising a SyntaxError
def timid_relpath(arg):
    """Convert an argument to a relative path, carefully: only when the
    relative form actually has fewer path separators; otherwise return the
    argument unchanged."""
    from os.path import isabs, relpath, sep
    if not isabs(arg):
        return arg
    candidate = relpath(arg)
    return candidate if candidate.count(sep) + 1 < arg.count(sep) else arg
convert an argument to a relative path, carefully
def _configure_device(commands, **kwargs): if salt.utils.platform.is_proxy(): return __proxy__['nxos.proxy_config'](commands, **kwargs) else: return _nxapi_config(commands, **kwargs)
Helper function to send configuration commands to the device over a proxy minion or native minion using NX-API or SSH.
def _browse_body(self, search_id): xml = self._base_body() XML.SubElement(xml, 's:Body') item_attrib = { 'xmlns': 'http://www.sonos.com/Services/1.1' } search = XML.SubElement(xml[1], 'getMetadata', item_attrib) XML.SubElement(search, 'id').text = search_id XML.SubElement(search, 'index').text = '0' XML.SubElement(search, 'count').text = '100' return XML.tostring(xml)
Return the browse XML body. The XML is formed by adding, to the envelope of the XML returned by ``self._base_body``, the following ``Body`` part: .. code :: xml <s:Body> <getMetadata xmlns="http://www.sonos.com/Services/1.1"> <id>root</id> <index>0</index> <count>100</count> </getMetadata> </s:Body> .. note:: The XML contains index and count, but the service does not seem to respect them, so therefore they have not been included as arguments.
def get_min_isr(zk, topic):
    """Return the ``min.insync.replicas`` setting for *topic*.

    Returns None when the topic node does not exist or when the
    setting is absent from the topic's configuration.
    """
    conf_key = 'min.insync.replicas'
    try:
        topic_config = zk.get_topic_config(topic)
    except NoNodeError:
        return None
    settings = topic_config['config']
    if conf_key not in settings:
        return None
    return int(settings[conf_key])
def full_name(self):
    """Return the user's first and last name separated by a space.

    A name component that is None is skipped; with both missing, an
    empty string is returned.
    """
    parts = [
        part
        for part in (self.user.first_name, self.user.last_name)
        if part is not None
    ]
    return " ".join(parts)
def get_path(url):
    """Get the path from a given url, including the querystring.

    Args:
        url (str)

    Returns:
        str
    """
    parts = urlsplit(url)
    if parts.query:
        return "{}?{}".format(parts.path, parts.query)
    return parts.path
def _handle_token(self, token):
    """Handle a single token.

    Dispatches to the handler registered for the token's type in
    ``_HANDLERS``; an unregistered token type raises ParserError.
    """
    try:
        return _HANDLERS[type(token)](self, token)
    except KeyError:
        # NOTE(review): a KeyError raised *inside* the handler is also
        # caught here and misreported as an unexpected token type —
        # confirm this is intended.
        err = "_handle_token() got unexpected {0}"
        raise ParserError(err.format(type(token).__name__))
def _normalize_roots(file_roots):
    """Normalize file or pillar roots.

    Ensures each saltenv key is a text type, replaces non-list/tuple
    directory values with an empty list, and expands glob patterns in
    each directory list. Mutates and returns *file_roots*.
    """
    # NOTE(review): re-keying pops entries while iterating the dict;
    # under Python 3 this can raise RuntimeError if any key actually
    # changes — confirm keys are already text in practice.
    for saltenv, dirs in six.iteritems(file_roots):
        normalized_saltenv = six.text_type(saltenv)
        if normalized_saltenv != saltenv:
            file_roots[normalized_saltenv] = file_roots.pop(saltenv)
        if not isinstance(dirs, (list, tuple)):
            # Discard malformed values rather than failing.
            file_roots[normalized_saltenv] = []
        file_roots[normalized_saltenv] = \
            _expand_glob_path(file_roots[normalized_saltenv])
    return file_roots
def VAR_DECL(self, cursor):
    """Handles Variable declaration.

    Resolves the variable's type and initial value, registers a
    ``typedesc.Variable`` under the cursor's unique name, and records
    location and comment metadata on the registered object.
    """
    name = self.get_unique_name(cursor)
    log.debug('VAR_DECL: name: %s', name)
    if self.is_registered(name):
        # Already processed: hand back the cached description.
        return self.get_registered(name)
    _type = self._VAR_DECL_type(cursor)
    init_value = self._VAR_DECL_value(cursor, _type)
    log.debug('VAR_DECL: _type:%s', _type.name)
    log.debug('VAR_DECL: _init:%s', init_value)
    log.debug('VAR_DECL: location:%s', getattr(cursor, 'location'))
    obj = self.register(name, typedesc.Variable(name, _type, init_value))
    self.set_location(obj, cursor)
    self.set_comment(obj, cursor)
    # NOTE(review): this path returns True while the already-registered
    # path above returns the registered object — confirm callers accept
    # both return types.
    return True
def authenticate(remote_addr, password, cert, key, verify_cert=True):
    """Authenticate with a remote LXD daemon.

    remote_addr :
        URL of the remote server; cert and key are required when it is
        a TCP address. Example: https://myserver.lan:8443

    password :
        The trust password of the remote.

    cert :
        PEM formatted SSL certificate, e.g. ~/.config/lxc/client.crt

    key :
        PEM formatted SSL key, e.g. ~/.config/lxc/client.key

    verify_cert : True
        Whether to verify the certificate. Defaults to True, but is
        commonly disabled because LXD normally uses self-signed
        certificates.

    CLI Example:

    .. code-block:: bash

        $ salt '*' lxd.authenticate https://srv01:8443 <yourpass> \\
            ~/.config/lxc/client.crt ~/.config/lxc/client.key false

    See the requests documentation for details on SSL certificate
    verification:
    http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification
    """
    client = pylxd_client_get(remote_addr, cert, key, verify_cert)
    if client.trusted:
        # Already trusted: no password exchange needed.
        return True
    try:
        client.authenticate(password)
    except pylxd.exceptions.LXDAPIException as e:
        raise CommandExecutionError(six.text_type(e))
    return client.trusted
def to_bytes(self):
    """Return the packed byte representation of the TCP header.

    The fixed header (built with the stored checksum) is concatenated
    with the packed options block.
    """
    packed_options = self._options.to_bytes()
    return self._make_header(self._checksum) + packed_options
def has_publish_permission(self, request, obj=None):
    """Return whether the request's user has publish permission.

    Checks the model-wide ``<app_label>.publish_<model_name>``
    permission first; when that is exactly False and an object is
    supplied, falls back to the object-level permission check.
    """
    perm = '{}.publish_{}'.format(self.opts.app_label, self.opts.model_name)
    user = request.user
    granted = user.has_perm(perm)
    if granted is False and obj is not None:
        return user.has_perm(perm, obj=obj)
    return granted
def finish(self, status, response):
    """Mark the end of a recorded RPC.

    Stores the status, the hex-encoded response bytes, and the elapsed
    time (in seconds) since ``self._start_time``.
    """
    self.status = status
    self.response = binascii.hexlify(response).decode('utf-8')
    self.runtime = monotonic() - self._start_time
def add_order(self, order):
    """Adds a MarketOrder instance to the list of market orders
    contained within this order list.

    Creates the (region, type) bucket on first sight of the
    combination, then delegates to the bucket's own ``add_order``.

    :param MarketOrder order: The order to add to this order list.
    """
    key = '%s_%s' % (order.region_id, order.type_id)
    # Fix: dict.has_key() was removed in Python 3; use the `in`
    # operator (equivalent membership test on both Python 2 and 3).
    if key not in self._orders:
        self.set_empty_region(
            order.region_id,
            order.type_id,
            order.generated_at
        )
    self._orders[key].add_order(order)
def fast_memory_load(self, addr, size, data_type, endness='Iend_LE'):
    """Load memory bytes from loader's memory backend.

    :param int addr: The address to begin memory loading.
    :param int size: Size in bytes.
    :param data_type: Type of the data: ``int`` unpacks a word,
                      ``str`` decodes the bytes to a string, anything
                      else returns the raw bytes.
    :param str endness: Endianness of this memory load.
    :return: Data read out of the memory, or None when the address is
             not mapped.
    :rtype: int or bytes or str or None
    """
    if data_type is int:
        try:
            return self.project.loader.memory.unpack_word(addr, size=size, endness=endness)
        except KeyError:
            # Address not mapped in the loader backend.
            return None
    try:
        data = self.project.loader.memory.load(addr, size)
        if data_type is str:
            # Byte-by-byte chr() conversion (latin-1-like mapping).
            return "".join(chr(i) for i in data)
        return data
    except KeyError:
        return None
def _create_compositional_array_(expanded_chemical_formaula_string): element_array = re.findall( '[A-Z][^A-Z]*', expanded_chemical_formaula_string) split_element_array = [] for s in element_array: m = re.match(r"([a-zA-Z]+)([0-9\.]*)", s, re.I) if m: items = m.groups() if items[1] == "": items = (items[0], 1) this_e = {"symbol": items[0], "occurances": float(items[1])} split_element_array.append(this_e) return split_element_array
Splits an expanded chemical formula string into an array of dictionaries containing information about each element :param expanded_chemical_formaula_string: a clean (not necessarily emperical, but without any special characters) chemical formula string, as returned by _expand_formula_() :return: an array of dictionaries
def get_stream_action_type(stream_arn):
    """Returns the awacs Action for a stream type given an arn.

    Args:
        stream_arn (str): The Arn of the stream.

    Returns:
        :class:`awacs.aws.Action`: The appropriate stream type awacs
            Action class.

    Raises:
        ValueError: If the stream type doesn't match kinesis or
            dynamodb.
    """
    stream_type_map = {
        "kinesis": awacs.kinesis.Action,
        "dynamodb": awacs.dynamodb.Action,
    }
    # The service name is the third ':'-separated field of an ARN.
    stream_type = stream_arn.split(":")[2]
    try:
        return stream_type_map[stream_type]
    except KeyError:
        raise ValueError(
            "Invalid stream type '%s' in arn '%s'" % (stream_type, stream_arn)
        )
def sender(self) -> Optional[Sequence[SingleAddressHeader]]:
    """The ``Sender`` header, or ``None`` when the header is absent."""
    try:
        value = self[b'sender']
    except KeyError:
        return None
    return cast(Sequence[SingleAddressHeader], value)
def content_remove(self, key, model, contentid):
    """Deletes the information for the given contentid under the given
    model.

    This method maps to
    https://github.com/exosite/docs/tree/master/provision#delete---delete-content

    Args:
        key: The CIK or Token for the device.
        model: The model name the content belongs to.
        contentid: Identifier of the content entry to delete.
    """
    path = PROVISION_MANAGE_CONTENT + model + '/' + contentid
    return self._request(path, key, '', 'DELETE', self._manage_by_cik)
def _default_key_setter(self, name, subject):
    """Route assignment of *subject* under *name* to the right adder.

    This method is used only when there is a custom key_setter set.
    Do not override this method.
    """
    if is_config_item(subject):
        self.add_item(name, subject)
        return
    if is_config_section(subject):
        self.add_section(name, subject)
        return
    raise TypeError(
        'Section items can only be replaced with items, '
        'got {type}. To set item value use ...{name}.value = <new_value>'.format(
            type=type(subject),
            name=name,
        )
    )
def update_port_side(self):
    """Updates the side of the port from its initial position.

    The port side is calculated from the port position relative to the
    state's border rectangle: the two diagonal tests split the
    rectangle into four triangles, one per side. The position is then
    limited to the corresponding side line and snapped to the nearest
    border.
    """
    from rafcon.utils.geometry import point_left_of_line
    p = (self._initial_pos.x, self._initial_pos.y)
    nw_x, nw_y, se_x, se_y = self.get_adjusted_border_positions()
    # Left of the NW->SE diagonal: top or right triangle.
    if point_left_of_line(p, (nw_x, nw_y), (se_x, se_y)):
        if point_left_of_line(p, (nw_x, se_y), (se_x, nw_y)):
            self._port.side = SnappedSide.TOP
            self.limit_pos(p[0], se_x, nw_x)
        else:
            self._port.side = SnappedSide.RIGHT
            self.limit_pos(p[1], se_y, nw_y)
    else:
        if point_left_of_line(p, (nw_x, se_y), (se_x, nw_y)):
            self._port.side = SnappedSide.LEFT
            self.limit_pos(p[1], se_y, nw_y)
        else:
            self._port.side = SnappedSide.BOTTOM
            self.limit_pos(p[0], se_x, nw_x)
    self.set_nearest_border()
def get_current_client(self):
    """Return the currently selected notebook, or None if unavailable."""
    try:
        return self.tabwidget.currentWidget()
    except AttributeError:
        return None
def unpack_log_data(self, log_data, timestamp):
    """Unpack received logging data so it represents real values
    according to the configuration in the entry.

    Walks ``self.variables`` in order, decoding each value from the
    byte buffer using the variable's fetch-as type, then notifies the
    data-received callback with a name->value dict.
    """
    ret_data = {}
    data_index = 0
    for var in self.variables:
        size = LogTocElement.get_size_from_id(var.fetch_as)
        name = var.name
        unpackstring = LogTocElement.get_unpack_string_from_id(
            var.fetch_as)
        # Values are packed back-to-back in declaration order.
        value = struct.unpack(
            unpackstring, log_data[data_index:data_index + size])[0]
        data_index += size
        ret_data[name] = value
    self.data_received_cb.call(timestamp, ret_data, self)
def configure_owner(self, owner='www-data'):
    """Shortcut to set process owner data.

    :param str|unicode owner: Sets user and group. Default:
        ``www-data``. A ``None`` owner leaves the process untouched.
    """
    if owner is None:
        return self
    self.main_process.set_owner_params(uid=owner, gid=owner)
    return self
def degrees(x):
    """Convert radians to degrees.

    An UncertainFunction input has its Monte-Carlo points converted
    and rewrapped; any other input is passed to ``numpy.degrees``.
    """
    if not isinstance(x, UncertainFunction):
        return np.degrees(x)
    converted = np.degrees(x._mcpts)
    return UncertainFunction(converted)
def get_long_description():
    """Retrieve the long description from DESCRIPTION.rst.

    Reads the file located next to this module as UTF-8 text.
    """
    here = os.path.abspath(os.path.dirname(__file__))
    with copen(os.path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as description:
        return description.read()
def get_data(self, collection):
    """Return serialized list of data objects on collection that user
    has `view` permission on.
    """
    data = self._filter_queryset('view_data', collection.data.all())
    return self._serialize_data(data)
def enable_aliases_autocomplete_interactive(_, **kwargs):
    """Enable aliases autocomplete on interactive mode by injecting
    aliases in the command tree.

    Silently does nothing when no subtree (with children) is provided
    in *kwargs*.
    """
    subtree = kwargs.get('subtree', None)
    if not subtree or not hasattr(subtree, 'children'):
        return
    for alias, alias_command in filter_aliases(get_alias_table()):
        # Only inject the alias when its target command exists in the
        # tree.
        if subtree.in_tree(alias_command.split()):
            subtree.add_child(CommandBranch(alias))
def get_product_order_book(self, product_id, level=1):
    """Get a list of open orders for a product.

    The amount of detail shown can be customized with the `level`
    parameter:

    * 1: Only the best bid and ask
    * 2: Top 50 bids and asks (aggregated)
    * 3: Full order book (non aggregated)

    Level 1 and Level 2 are recommended for polling; use the websocket
    stream for the most up-to-date data. **Caution**: Level 3 polling
    abuse may get your access limited or blocked.

    Args:
        product_id (str): Product
        level (Optional[int]): Order book level (1, 2, or 3).
            Default is 1.

    Returns:
        dict: Order book.
    """
    endpoint = '/products/{}/book'.format(product_id)
    return self._send_message('get', endpoint, params={'level': level})
def do_call(self, path, method, body=None, headers=None):
    """Send an HTTP request to the REST API.

    :param string path: A URL path, joined onto ``self.base_url``.
    :param string method: The HTTP method (GET, POST, etc.) to use in
        the request.
    :param string body: A string representing any data to be sent in
        the body of the HTTP request.
    :param dictionary headers: "{header-name: header-value}"
        dictionary.
    :raises NetworkError: on timeout or other transport-level failure.
    :raises HTTPError: when the response status is outside 200-206.
    :return: the decoded JSON response body, or None when the body is
        empty or not JSON.
    """
    url = urljoin(self.base_url, path)
    try:
        resp = requests.request(method, url, data=body, headers=headers,
                                auth=self.auth, timeout=self.timeout)
    except requests.exceptions.Timeout as out:
        raise NetworkError("Timeout while trying to connect to RabbitMQ")
    except requests.exceptions.RequestException as err:
        raise NetworkError("Error during request %s %s" % (type(err), err))
    try:
        content = resp.json()
    except ValueError as out:
        # Non-JSON (or empty) body.
        content = None
    if resp.status_code < 200 or resp.status_code > 206:
        raise HTTPError(content, resp.status_code, resp.text, path, body)
    else:
        if content:
            return content
        else:
            return None
def extras_to_string(extras):
    """Turn a list of extras into a string.

    A string already wrapped in brackets is returned unchanged; a bare
    string is treated as a single extra. Extras are de-duplicated and
    sorted before being joined as ``[a,b,...]``; an empty input yields
    an empty string.
    """
    if isinstance(extras, six.string_types):
        if extras.startswith("["):
            return extras
        extras = [extras]
    if not extras:
        return ""
    unique = sorted(set(extras))
    return "[{0}]".format(",".join(unique))
def with_batch_norm_control(self, is_training, test_local_stats=True):
    """Wraps this RNNCore with the additional control input to the
    `BatchNorm`s.

    Example usage:

        lstm = snt.BatchNormLSTM(4)
        is_training = tf.placeholder(tf.bool)
        rnn_input = ...
        my_rnn = rnn.rnn(lstm.with_batch_norm_control(is_training),
                         rnn_input)

    Args:
        is_training: Boolean that indicates whether we are in training
            mode or testing mode. In training mode, batch norm
            statistics come from the given batch and moving statistics
            are updated; in testing mode they are not (and may be used
            instead of local stats, see `test_local_stats`).
        test_local_stats: Boolean scalar indicating whether to use
            local batch statistics in test mode.

    Returns:
        snt.RNNCore wrapping this class with the extra input(s) added.
    """
    return BatchNormLSTM.CoreWithExtraBuildArgs(
        self, is_training=is_training, test_local_stats=test_local_stats)