Columns: code (string, lengths 51 to 2.38k) | docstring (string, lengths 4 to 15.2k)
def setPhysicalMinimum(self, edfsignal, physical_minimum): if (edfsignal < 0 or edfsignal > self.n_channels): raise ChannelDoesNotExist(edfsignal) self.channels[edfsignal]['physical_min'] = physical_minimum self.update_header()
Sets the physical_minimum of signal edfsignal. Parameters ---------- edfsignal: int signal number physical_minimum: float Sets the physical minimum Notes ----- This function is required for every signal and can be called only after openin...
def scan(self): self.logger.info('{0} registered scan functions, starting {0} threads ' 'to scan candidate proxy lists...' .format(len(self.scan_funcs))) for i in range(len(self.scan_funcs)): t = threading.Thread( name=self.sc...
Start a thread for each registered scan function to scan proxy lists
def _remove_trailing_new_line(l): for n in sorted(new_lines_bytes, key=lambda x: len(x), reverse=True): if l.endswith(n): remove_new_line = slice(None, -len(n)) return l[remove_new_line] return l
Remove a single instance of new line at the end of l if it exists. Returns: bytestring
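The descending-length sort is what makes this correct: trying the longest terminator first strips a two-byte b'\r\n' whole instead of leaving a dangling b'\r'. A minimal self-contained sketch of the same idea; `new_lines_bytes` is an assumed, hypothetical module-level tuple (the original's definition is not shown):

# Sketch only; new_lines_bytes is assumed, not taken from the original source.
new_lines_bytes = (b"\r\n", b"\r", b"\n")

def remove_trailing_new_line(l: bytes) -> bytes:
    # Try the longest terminator first so b"\r\n" is removed whole,
    # rather than leaving a dangling b"\r" behind.
    for n in sorted(new_lines_bytes, key=len, reverse=True):
        if l.endswith(n):
            return l[: -len(n)]
    return l

print(remove_trailing_new_line(b"data\r\n"))   # b'data'
print(remove_trailing_new_line(b"data\n\n"))   # b'data\n' -- only one newline removed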
def get_keyid(keyname): if not keyname: return None keypairs = list_keypairs(call='function') keyid = keypairs[keyname]['id'] if keyid: return keyid raise SaltCloudNotFound('The specified ssh key could not be found.')
Return the ID of the keyname
def get_app_state(app_id): try: conn = get_conn() c = conn.cursor() c.execute("SELECT state FROM app WHERE id='{0}' ".format(app_id)) result = c.fetchone() conn.close() if result: state = result[0] return state else: return ...
get app state
def path(self, args, kw): params = self._pop_params(args, kw) if args or kw: raise InvalidArgumentError("Extra parameters (%s, %s) when building path for %s" % (args, kw, self.template)) return self.build_url(**params)
Builds the URL path fragment for this route.
def remove(self, fieldspec): pattern = r'(?P<field>[^.]+)(.(?P<subfield>[^.]+))?' match = re.match(pattern, fieldspec) if not match: return None grp = match.groupdict() for field in self.get_fields(grp['field']): if grp['subfield']: updated...
Removes fields or subfields according to `fieldspec`. If a non-control field subfield removal leaves no other subfields, delete the field entirely.
def create(self, **kwargs): url_str = self.base_url if 'tenant_id' in kwargs: url_str = url_str + '?tenant_id=%s' % kwargs['tenant_id'] del kwargs['tenant_id'] data = kwargs['jsonbody'] if 'jsonbody' in kwargs else kwargs body = self.client.create(url=url_str, jso...
Create a metric.
def get_clean_interp_index(arr, dim, use_coordinate=True, **kwargs): if use_coordinate: if use_coordinate is True: index = arr.get_index(dim) else: index = arr.coords[use_coordinate] if index.ndim != 1: raise ValueError( 'Coordi...
get index to use for x values in interpolation. If use_coordinate is True, the coordinate that shares the name of the dimension along which interpolation is being performed will be used as the x values. If use_coordinate is False, the x values are set as an equally spaced sequence.
def gets(self): ret = self.stdin.readline() if ret == '': raise EOFError return ret.rstrip('\n')
Read line from stdin. The trailing newline will be omitted. :return: string:
def follow(the_file): with open(the_file) as f: f.seek(0, 2) while True: line = f.readline() if not line: time.sleep(0.1) continue yield line
Follow a given file and yield new lines when they are available, like `tail -f`.
def Reset(self): self.state = "INITIAL" self.state_stack = [] self.buffer = "" self.error = 0 self.verbose = 0 self.processed = 0 self.processed_buffer = ""
Reset the lexer to process a new data feed.
def read_tsv(cls, file_path: str, gene_table: ExpGeneTable = None, encoding: str = 'UTF-8', sep: str = '\t'): matrix = cls(pd.read_csv(file_path, sep=sep, index_col=0, header=0, encoding=encoding)) ind = pd.read_csv(file_path, sep=sep, usecols=[0, ], hea...
Read expression matrix from a tab-delimited text file. Parameters ---------- file_path: str The path of the text file. gene_table: `ExpGeneTable` object, optional The set of valid genes. If given, the genes in the text file will be filtered against th...
def profile_delete(self): self.validate_profile_exists() profile_data = self.profiles.get(self.args.profile_name) fqfn = profile_data.get('fqfn') with open(fqfn, 'r+') as fh: data = json.load(fh) for profile in data: if profile.get('profile_name') ...
Delete an existing profile.
def hash_function(self): assert hasattr(self, 'f1') and hasattr(self, 'f2') f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash
Returns the hash function proper. Ensures that `self` is not bound to the returned closure.
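The "not bound to `self`" note works because the closure captures plain locals (f1, f2, g) copied out of the instance, so the returned function keeps no reference to the object. A toy sketch of that idiom, with a hypothetical TableHash class standing in for the original:

import weakref

class TableHash:
    # Toy illustration (not the original class): returning a closure that
    # captures locals instead of `self` lets the instance be garbage collected.
    def __init__(self, g):
        self.f1 = lambda w: len(w) % len(g)
        self.f2 = lambda w: sum(map(ord, w)) % len(g)
        self.g = g

    def hash_function(self):
        f1, f2, g = self.f1, self.f2, self.g   # copy attributes into locals
        def h(word):
            return (g[f1(word)] + g[f2(word)]) % len(g)
        return h                               # closes over f1/f2/g, not self

t = TableHash([0, 3, 1, 4, 2])
h = t.hash_function()
ref = weakref.ref(t)
del t
print(ref())      # None on CPython: the closure holds no reference to the instance
print(h("word"))  # the hash function keeps working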
def unescape(self): for i, k in enumerate(self._html_escape_table): v = self._html_escape_table[k] self.obj = self.obj.replace(v, k) return self._wrap(self.obj)
Within an interpolation, evaluation, or escaping, remove HTML escaping that had been previously added.
def call(self, method_path, **kwargs): interface, method = method_path.split('.', 1) return getattr(getattr(self, interface), method)(**kwargs)
Make an API call for specific method :param method_path: format ``Interface.Method`` (e.g. ``ISteamWebAPIUtil.GetServerInfo``) :type method_path: :class:`str` :param kwargs: keyword arguments for the specific method :return: response :rtype: :class:`dict`, :class:`lxml.etree.Ele...
def parse_from_dict(json_dict): history_columns = json_dict['columns'] history_list = MarketHistoryList( upload_keys=json_dict['uploadKeys'], history_generator=json_dict['generator'], ) for rowset in json_dict['rowsets']: generated_at = parse_datetime(rowset['generatedAt']) ...
Given a Unified Uploader message, parse the contents and return a MarketHistoryList instance. :param dict json_dict: A Unified Uploader message as a dict. :rtype: MarketHistoryList :returns: An instance of MarketHistoryList, containing the history entries within.
def _init_go2res(**kws): if 'goea_results' in kws: return {res.GO:res for res in kws['goea_results']} if 'go2nt' in kws: return kws['go2nt']
Initialize GOEA results.
def parse(file_path): _, ext = path.splitext(file_path) if ext in ('.yaml', '.yml'): func = yaml.load elif ext == '.json': func = json.load else: raise ValueError("Unrecognized config file type %s" % ext) with open(file_path, 'r') as f: return func(f)
Parse a YAML or JSON file.
def _normalize_file_paths(self, *args): paths = [] for arg in args: if arg is None: continue elif self._is_valid_file(arg): paths.append(arg) elif isinstance(arg, list) and all(self._is_valid_file(_) for _ in arg): paths...
Returns all given configuration file paths as one list.
def get_shell_folder (name): try: import _winreg as winreg except ImportError: import winreg lm = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) try: key = winreg.OpenKey(lm, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders") try: ret...
Get Windows Shell Folder locations from the registry.
def run(self, cmd): cmd = dict(cmd) client = 'minion' mode = cmd.get('mode', 'async') funparts = cmd.get('fun', '').split('.') if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']: client = funparts[0] cmd['fun'] = '.'.join(funparts[1:]) if ...
Execute the salt command given by cmd dict. cmd is a dictionary of the following form: { 'mode': 'modestring', 'fun' : 'modulefunctionstring', 'kwarg': functionkeywordargdictionary, 'tgt' : 'targetpatternstring', 'tgt_type' : 'targetpatternty...
def get_resources(connection): resp = connection.describe(verbose=False).split('\r\n') resources = [x.replace('a=control:','') for x in resp if (x.find('control:') != -1 and x[-1] != '*' )] return resources
Do an RTSP-DESCRIBE request, then parse out available resources from the response
def bash_rule(bash, hostnames): if isinstance(bash, dict): return make_fail('bash_rule', error_message="Run this rule with a cluster archive") return make_pass('bash_rule', bash=bash, hostname=hostnames)
Cluster rule to process bash and hostname info ``bash`` and ``hostnames`` are Pandas DataFrames for the facts collected for each host in the cluster. See https://pandas.pydata.org/pandas-docs/stable/api.html#dataframe for information on available attributes and methods. Arguments: bash (p...
def run_example(example_name, environ): mod = EXAMPLE_MODULES[example_name] register_calendar("YAHOO", get_calendar("NYSE"), force=True) return run_algorithm( initialize=getattr(mod, 'initialize', None), handle_data=getattr(mod, 'handle_data', None), before_trading_start=getattr(mod,...
Run an example module from zipline.examples.
def dafopw(fname): fname = stypes.stringToCharP(fname) handle = ctypes.c_int() libspice.dafopw_c(fname, ctypes.byref(handle)) return handle.value
Open a DAF for subsequent write requests. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafopw_c.html :param fname: Name of DAF to be opened. :type fname: str :return: Handle assigned to DAF. :rtype: int
def set_tags(self, md5, tags): if isinstance(tags, str): tags = [tags] tag_set = set(tags) self.data_store.store_work_results({'tags': list(tag_set)}, 'tags', md5)
Set the tags for this sample
def get_mentions(self, docs=None, sort=False): result = [] if docs: docs = docs if isinstance(docs, (list, tuple)) else [docs] for mention_class in self.mention_classes: mentions = ( self.session.query(mention_class) .filter...
Return a list of lists of the mentions associated with this extractor. Each inner list contains the Mentions for one of the mention classes associated with the MentionExtractor. :param docs: If provided, return Mentions from these documents. Else, return all Mentions. ...
def items(self, *args, **kwargs): return self.get_stream()(self.get_object(*args, **kwargs))
Returns a queryset of Actions to use based on the stream method and object.
def load_template(path_or_buffer): from itertools import groupby from operator import itemgetter path_or_buffer = _stringify_path(path_or_buffer) if is_file_like(path_or_buffer): templates = json.load(path_or_buffer) else: with open(path_or_buffer, 'r') as f: templates = ...
Build tabula-py options from a template file. Args: path_or_buffer: File-like object or path of a Tabula app template. Returns: dict: tabula-py options
def create_datapoint(value, timestamp=None, **tags): if timestamp is None: timestamp = time_millis() if type(timestamp) is datetime: timestamp = datetime_to_time_millis(timestamp) item = { 'timestamp': timestamp, 'value': value } if tags is not None: item['tags'] = t...
Creates a single datapoint dict with a value, timestamp and tags. :param value: Value of the datapoint. Type depends on the id's MetricType :param timestamp: Optional timestamp of the datapoint. Uses client current time if not set. Millisecond accuracy. Can be datetime instance also. :param tags: Optional ...
def on_button_release(self, event): self.queue_draw(self.view) x0, y0, x1, y1 = self.x0, self.y0, self.x1, self.y1 rectangle = (min(x0, x1), min(y0, y1), abs(x1 - x0), abs(y1 - y0)) selected_items = self.view.get_items_in_rectangle(rectangle, intersect=False) self.view.handle_new...
Select or deselect rubber-banded groups of items. The prior selection takes precedence; items are never selected and deselected at the same time.
def minus(repo_list_a, repo_list_b): included = defaultdict(lambda: False) for repo in repo_list_b: included[repo.full_name] = True a_minus_b = list() for repo in repo_list_a: if not included[repo.full_name]: included[repo.full_name] = True ...
Create a list of repositories that belong to repo list a but not to repo list b. In an ideal scenario we should be able to do this with set(a) - set(b), but since GithubRepositories have shown that set() on them is not reliable, resort to this until it is all sorted...
def combine_and_save(add_path_list, out_path): add_path_list = list(add_path_list) first_ds_path = add_path_list[0] print('Starting with {}'.format(first_ds_path)) combined = MLDataset(first_ds_path) for ds_path in add_path_list[1:]: try: combined = combined + MLDataset(ds_path) ...
Combines whatever datasets that can be combined, and save the bigger dataset to a given location.
def _add_logical_operator(self, operator): if not self.c_oper: raise QueryExpressionError("Logical operators must be preceded by an expression") self.current_field = None self.c_oper = None self.l_oper = inspect.currentframe().f_back.f_code.co_name self._query.append(...
Adds a logical operator to the query :param operator: logical operator (str) :raise: - QueryExpressionError: if an expression hasn't been set
def power_status_update(self, POWER_STATUS): now = time.time() Vservo = POWER_STATUS.Vservo * 0.001 Vcc = POWER_STATUS.Vcc * 0.001 self.high_servo_voltage = max(self.high_servo_voltage, Vservo) if self.high_servo_voltage > 1 and Vservo < self.settings.servowarn: if no...
update POWER_STATUS warnings level
def combinePlinkBinaryFiles(prefixes, outPrefix): outputFile = None try: outputFile = open(outPrefix + ".files_to_merge", "w") except IOError: msg = "%(outPrefix)s.filesToMerge: can't write file" % locals() raise ProgramError(msg) for prefix in prefixes[1:]: print >>outpu...
Combine Plink binary files. :param prefixes: a list of the prefix of the files that need to be combined. :param outPrefix: the prefix of the output file (the combined file). :type prefixes: list :type outPrefix: str It uses Plink to merge a list of binary files (which is a li...
def add_header(self, name, value): if self.headers is None: self.headers = [] self.headers.append(dict(Name=name, Value=value))
Attach an email header to send with the message. :param name: The name of the header value. :param value: The header value.
def raise_db_exception(self): if not self.messages: raise tds_base.Error("Request failed, server didn't send error message") msg = None while True: msg = self.messages[-1] if msg['msgno'] == 3621: self.messages = self.messages[:-1] ...
Raises an exception from the last server message. This function will skip messages of the form: 'The statement has been terminated'
def check_key(self, key: str) -> bool: keys = self.get_keys() return key in keys
Checks if key exists in datastore. True if yes, False if no. :param key: SHA512 hash key :return: whether or not key exists in datastore
def tokenize(self, s): javabridge.call(self.jobject, "tokenize", "(Ljava/lang/String;)V", s) return TokenIterator(self)
Tokenizes the string. :param s: the string to tokenize :type s: str :return: the iterator :rtype: TokenIterator
def expired(self): self._data["_killed"] = True self.save() raise SessionExpired(self._config.expired_message)
Called when an expired session is accessed
def stream_messages(self): if self._stream_messages is None: self._stream_messages = StreamMessageList( self._version, service_sid=self._solution['service_sid'], stream_sid=self._solution['sid'], ) return self._stream_messages
Access the stream_messages :returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList :rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
async def download_file_by_id(self, file_id: base.String, destination=None, timeout: base.Integer = 30, chunk_size: base.Integer = 65536, seek: base.Boolean = True): file = await self.get_file(file_id) return await self.download_file(fi...
Download file by file_id to destination if You want to automatically create destination (:class:`io.BytesIO`) use default value of destination and handle result of this method. :param file_id: str :param destination: filename or instance of :class:`io.IOBase`. For e. g. :class:`io.Byte...
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ...
Set standardized axis formatting for figure.
def generate_semantic_data_key(used_semantic_keys): semantic_data_id_counter = -1 while True: semantic_data_id_counter += 1 if "semantic data key " + str(semantic_data_id_counter) not in used_semantic_keys: break return "semantic data key " + str(semantic_data_id_counter)
Create a new and unique semantic data key :param list used_semantic_keys: Handed list of keys already in use :rtype: str :return: semantic_data_id
def send_message(self, message): if self.connected: self.send( json.dumps(message.request))
Send a message down the socket. The message is expected to have a `request` attribute that holds the message to be serialized and sent.
def _parse_title(line_iter, cur_line, conf): title = [] conf['title'].append(title) title.append(('title_name', cur_line.split('title', 1)[1].strip())) while (True): line = next(line_iter) if line.startswith("title "): return line cmd, opt = _parse_cmd(line) t...
Parse "title" in grub v1 config
def explicit_counts_map(self, pixels=None): if self.hpx._ipix is None: if self.data.ndim == 2: summed = self.counts.sum(0) if pixels is None: nz = summed.nonzero()[0] else: nz = pixels data_out = ...
return a counts map with explicit index scheme Parameters ---------- pixels : `np.ndarray` or None If set, grab only those pixels. If none, grab only non-zero pixels
def set_codes(self, codes): codemap = '' for cc in codes: cc = cc.upper() if cc in self.__ccodes: codemap += cc else: raise UnknownCountryCodeException(cc) self.codes = codemap
Set the country code map for the data. Codes are given in a list, e.g. DE - Germany, AT - Austria, US - United States
def prepend_path_variable_command(variable, paths): assert isinstance(variable, basestring) assert is_iterable_typed(paths, basestring) return path_variable_setting_command( variable, paths + [expand_variable(variable)])
Returns a command that prepends the given paths to the named path variable on the current platform.
def write_relationships(self, file_name, flat=True): with open(file_name, 'w') as writer: if flat: self._write_relationships_flat(writer) else: self._write_relationships_non_flat(writer)
This method will output the eDNA tags which are used inside each calculation. If flat=True, data will be written flat, like: ADE1CA01, ADE1PI01, ADE1PI02 If flat=False, data will be written in the non-flat way, like: ADE1CA01, ADE1PI01 ADE1CA01, ADE1PI02 ...
def namer(cls, imageUrl, pageUrl): imgname = imageUrl.split('/')[-1] imgbase = imgname.rsplit('-', 1)[0] imgext = imgname.rsplit('.', 1)[1] return '%s.%s' % (imgbase, imgext)
Remove random junk from image names.
def isValid(folder, epoch=0): return os.path.exists(os.path.join(folder, str(epoch), "train", "silence.pkl"))
Check if the given folder is a valid preprocessed dataset
def _simplify_non_context_field_binary_composition(expression): if any((isinstance(expression.left, ContextField), isinstance(expression.right, ContextField))): raise AssertionError(u'Received a BinaryComposition {} with a ContextField ' u'operand. This should never ...
Return a simplified BinaryComposition if either operand is a TrueLiteral. Args: expression: BinaryComposition without any ContextField operand(s) Returns: simplified expression if the given expression is a disjunction/conjunction and one of its operands is a TrueLiteral, and t...
def who_has(self, subid): answer = [] for name in self.__map: if subid in self.__map[name] and not name in answer: answer.append(name) return answer
Return a list of names who own subid in their id range set.
def formation_energy(self, chemical_potentials=None, fermi_level=0): chemical_potentials = chemical_potentials if chemical_potentials else {} chempot_correction = sum([ chem_pot * (self.bulk_structure.composition[el] - self.defect.defect_composition[el]) for el, chem_pot in chemi...
Computes the formation energy for a defect taking into account a given chemical potential and fermi_level
def run(options, http_req_handler = HttpReqHandler): global _HTTP_SERVER for x in ('server_version', 'sys_version'): if _OPTIONS.get(x) is not None: setattr(http_req_handler, x, _OPTIONS[x]) _HTTP_SERVER = threading_tcp_server.KillableThreadingHTTPServer( _OPTIONS,...
Start and execute the server
def include(self, pattern): found = [f for f in glob(pattern) if not os.path.isdir(f)] self.extend(found) return bool(found)
Include files that match 'pattern'.
def run(self, **kwargs): self.saveas('in.idf') idd = kwargs.pop('idd', self.iddname) epw = kwargs.pop('weather', self.epw) try: run(self, weather=epw, idd=idd, **kwargs) finally: os.remove('in.idf')
Run an IDF file with a given EnergyPlus weather file. This is a wrapper for the EnergyPlus command line interface. Parameters ---------- **kwargs See eppy.runner.functions.run()
def list_domains_by_service(self, service_id): content = self._fetch("/service/%s/domain" % service_id, method="GET") return map(lambda x: FastlyDomain(self, x), content)
List the domains within a service.
def summary(self): if self.features is not None: feature_count = len(self.features) else: feature_count = 0 feature_hash = 'feathash:' + str(hash(tuple(self.features))) return (str(self.estimator), feature_count, feature_hash, self.target)
Summary of model definition for labeling. Intended to be somewhat readable but unique to a given model definition.
def draw(self, **kwargs): labels = ("Training Score", "Cross Validation Score") curves = ( (self.train_scores_mean_, self.train_scores_std_), (self.test_scores_mean_, self.test_scores_std_), ) colors = resolve_colors(n_colors=2) for idx, (mean, std) in enu...
Renders the training and test learning curves.
def get_delay(self, planned, estimated): delay = 0 if estimated >= planned: delay = round((estimated - planned).seconds / 60) else: delay = round((planned - estimated).seconds / 60) * -1 return delay
Minutes of delay relative to the planned departure.
def first(script, value=None, default=None, vars={}, url=None, opener=default_opener, library_paths=[]): return compile(script, vars, library_paths).first(_get_value(value, url, opener), default)
Transform object by jq script, returning the first result. Return default if result is empty.
def staticmap(ctx, mapid, output, features, lat, lon, zoom, size): access_token = (ctx.obj and ctx.obj.get('access_token')) or None if features: features = list( cligj.normalize_feature_inputs(None, 'features', [features])) service = mapbox.Static(access_token=access_token) try: ...
Generate static map images from existing Mapbox map ids. Optionally overlay with geojson features. $ mapbox staticmap --features features.geojson mapbox.satellite out.png $ mapbox staticmap --lon -61.7 --lat 12.1 --zoom 12 mapbox.satellite out2.png An access token is required, see `mapbox --help`.
def remove_rows_matching(df, column, match): df = df.copy() mask = df[column].values != match return df.iloc[mask, :]
Return a ``DataFrame`` with rows where `column` values match `match` are removed. The selected `column` series of values from the supplied Pandas ``DataFrame`` is compared to `match`, and those rows that match are removed from the DataFrame. :param df: Pandas ``DataFrame`` :param column: Column indexe...
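A small usage sketch of the row-removal helper above, repeating its logic so the example is self-contained; the sample DataFrame below is made up for illustration:

import pandas as pd

def remove_rows_matching(df, column, match):
    # Same logic as above: keep rows whose `column` value differs from `match`.
    df = df.copy()
    mask = df[column].values != match
    return df.iloc[mask, :]

df = pd.DataFrame({"sample": ["a", "b", "c"], "group": ["ctrl", "case", "ctrl"]})
print(remove_rows_matching(df, "group", "ctrl"))  # keeps only the row where group != 'ctrl'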
def Flush(self): if self.locked and self.CheckLease() == 0: self._RaiseLockError("Flush") self._WriteAttributes() self._SyncAttributes() if self.parent: self.parent.Flush()
Syncs this object with the data store, maintaining object validity.
def on_add_rows(self, event): num_rows = self.rows_spin_ctrl.GetValue() for row in range(num_rows): self.grid.add_row() self.main_sizer.Fit(self)
add rows to grid
def get_entity(self, entity, default=None): self._ensure_loaded() return self.entities.get(str(entity), default)
Gets an entity object from the ACL. :type entity: :class:`_ACLEntity` or string :param entity: The entity to look up in the ACL. :type default: anything :param default: This value will be returned if the entity doesn't exist. :rtype: :class:`_ACLEnti...
def on_enter_specimen(self, event): new_specimen = self.specimens_box.GetValue() if new_specimen not in self.specimens: self.user_warning( "%s is not a valid specimen with measurement data, aborting" % (new_specimen)) self.specimens_box.SetValue(self.s) ...
Upon pressing enter in the specimen box, makes that specimen the current specimen
def default_resolve_fn(source, info, **args): name = info.field_name if isinstance(source, dict): property = source.get(name) else: property = getattr(source, name, None) if callable(property): return property() return property
If a resolve function is not given, then a default resolve behavior is used which takes the property of the source object of the same name as the field and returns it as the result, or if it's a function, returns the result of calling that function.
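A self-contained sketch of that default resolve behavior, with `info` stubbed as a hypothetical SimpleNamespace carrying only field_name; it shows the dict, attribute, and callable cases:

from types import SimpleNamespace

def default_resolve_fn(source, info, **args):
    # Resolve the field named info.field_name from a dict key or an attribute;
    # if the resolved value is callable, call it and return the result.
    name = info.field_name
    if isinstance(source, dict):
        prop = source.get(name)
    else:
        prop = getattr(source, name, None)
    return prop() if callable(prop) else prop

info = SimpleNamespace(field_name="title")  # stub for the resolver info object
print(default_resolve_fn({"title": "dict value"}, info))                    # dict value
print(default_resolve_fn(SimpleNamespace(title="attr value"), info))        # attr value
print(default_resolve_fn(SimpleNamespace(title=lambda: "called"), info))    # called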
def handle_feedback(self, pkt): self.logger.debug("handle feedback") self.frame = self.decode_frameno(pkt.z & 0o7777) - 1 self.server.controller.init_frame(self.frame) self.server.controller.set_frame(self.frame)
This part of the protocol is used by IRAF to erase a frame in the framebuffers.
def create_initialized_contract_account(self, contract_code, storage) -> None: new_account = Account( self._generate_new_address(), code=contract_code, balance=0 ) new_account.storage = storage self._put_account(new_account)
Creates a new contract account, based on the contract code and storage provided. The contract code only includes the runtime contract bytecode. :param contract_code: Runtime bytecode for the contract :param storage: Initial storage for the contract :return: The new account
def update_in_hdx(self, **kwargs): self._check_load_existing_object('resource', 'id') if self.file_to_upload and 'url' in self.data: del self.data['url'] self._merge_hdx_update('resource', 'id', self.file_to_upload, **kwargs)
Check if resource exists in HDX and if so, update it. Args: **kwargs: See below operation (string): Operation to perform, e.g. patch. Defaults to update. Returns: None
def _strip_marker_elem(elem_name, elements): extra_indexes = [] preceding_operators = ["and"] if elem_name == "extra" else ["and", "or"] for i, element in enumerate(elements): if isinstance(element, list): cancelled = _strip_marker_elem(elem_name, element) if cancelled: ...
Remove the supplied element from the marker. This is not a comprehensive implementation, but relies on an important characteristic of metadata generation: The element's operand is always associated with an "and" operator. This means that we can simply remove the operand and the "and" operator associate...
def _high_dim_sim(self, v, w, normalize=False, X=None, idx=0): sim = np.exp((-np.linalg.norm(v - w) ** 2) / (2*self._sigma[idx] ** 2)) if normalize: return sim / sum(map(lambda x: x[1], self._knn(idx, X, high_dim=True))) else: return sim
Similarity measurement based on Gaussian Distribution
def getMessage(self): if isinstance(self.msg, numpy.ndarray): msg = self.array2string(self.msg) else: msg = str(self.msg) if self.args: a2s = self.array2string if isinstance(self.args, Dict): args = {k: (a2s(v) if isinstance(v, nump...
Return the message for this LogRecord. Return the message for this LogRecord after merging any user-supplied arguments with the message.
def get_resource_search_session(self, proxy): if not self.supports_resource_search(): raise errors.Unimplemented() return sessions.ResourceSearchSession(proxy=proxy, runtime=self._runtime)
Gets a resource search session. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.resource.ResourceSearchSession) - ``a ResourceSearchSession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimple...
def hist_axis_func(axis_type: enum.Enum) -> Callable[[Hist], Axis]: def axis_func(hist: Hist) -> Axis: try: hist_axis_type = axis_type.value except AttributeError: hist_axis_type = axis_type if hasattr(hist, "ProjectionND") and hasattr(hist, "Projection"): ...
Wrapper to retrieve the axis of a given histogram. This can be convenient outside of just projections, so it's made available in the API. Args: axis_type: The type of axis to retrieve. Returns: Callable to retrieve the specified axis when given a hist.
def to_yaml(obj, stream=None, dumper_cls=yaml.Dumper, default_flow_style=False, **kwargs): class OrderedDumper(dumper_cls): pass def dict_representer(dumper, data): return dumper.represent_mapping( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) ...
Serialize a Python object into a YAML stream with OrderedDict and default_flow_style defaulted to False. If stream is None, return the produced string instead. OrderedDict reference: http://stackoverflow.com/a/21912744 default_flow_style reference: http://stackoverflow.com/a/18210750 :param data:...
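The function above relies on PyYAML's representer-registration pattern; a minimal sketch of just that pattern, assuming a plain Dumper subclass (names here are illustrative, not necessarily the original's):

from collections import OrderedDict
import yaml

class OrderedDumper(yaml.Dumper):
    # Subclass so the extra representer does not leak onto yaml.Dumper itself.
    pass

def _dict_representer(dumper, data):
    # Emit OrderedDict items as an ordinary YAML mapping, preserving order.
    return dumper.represent_mapping(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items())

OrderedDumper.add_representer(OrderedDict, _dict_representer)

doc = OrderedDict([("b", 2), ("a", 1)])
print(yaml.dump(doc, Dumper=OrderedDumper, default_flow_style=False))  # b: 2 then a: 1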
def pop_arguments(instr, stack): needed = instr.stack_effect if needed >= 0: raise DecompilationError( "%s does not have a negative stack effect" % instr ) for popcount, to_pop in enumerate(reversed(stack), start=1): needed += to_pop.stack_effect if not needed:...
Pop instructions off `stack` until we pop all instructions that will produce values popped by `instr`.
def removeIndividual(self, individual): q = models.Individual.delete().where( models.Individual.id == individual.getId()) q.execute()
Removes the specified individual from this repository.
def on_action_run(self, task_vars, delegate_to_hostname, loader_basedir): self.inventory_hostname = task_vars['inventory_hostname'] self._task_vars = task_vars self.host_vars = task_vars['hostvars'] self.delegate_to_hostname = delegate_to_hostname self.loader_basedir = loader_bas...
Invoked by ActionModuleMixin to indicate a new task is about to start executing. We use the opportunity to grab relevant bits from the task-specific data. :param dict task_vars: Task variable dictionary. :param str delegate_to_hostname: :data:`None`, or the templ...
def get_future(self): now = dt.now() four_days = now + timedelta(hours=96) now = now.timestamp() four_days = four_days.timestamp() url = build_url(self.api_key, self.spot_id, self.fields, self.unit, now, four_days) return get_msw(url)
Get current and future forecasts.
def group_select(selects, length=None, depth=None): if length == None and depth == None: length = depth = len(selects[0]) getter = operator.itemgetter(depth-length) if length > 1: selects = sorted(selects, key=getter) grouped_selects = defaultdict(dict) for k, v in itertools....
Given a list of key tuples to select, groups them into sensible chunks to avoid duplicating indexing operations.
def show_distribution_section(config, title, section_name): payload = requests.get(config.apps_url).json() distributions = sorted(payload.keys(), reverse=True) latest_distribution = payload[distributions[0]] click.echo("{} {}".format("Release".rjust(7), title)) click.echo("------- ---------------") ...
Obtain distribution data and display latest distribution section, i.e. "demos" or "apps" or "themes".
def from_remote_hive(cls, url, *args, **kwargs): version = kwargs.pop('version', None) require = kwargs.pop('require_https', False) return cls(Hive.from_url(url, version, require), *args, **kwargs)
Download a JSON hive file from a URL, and initialize from it, paying attention to the version keyword argument.
def download_page(url, data=None): conn = urllib2.urlopen(url, data) resp = conn.read() conn.close() return resp
Returns the response for the given url. The optional data argument is passed directly to urlopen.
def shell(environment, opts): environment.require_data() environment.start_supporting_containers() return environment.interactive_shell( opts['COMMAND'], detach=opts['--detach'] )
Run a command or interactive shell within this environment Usage: datacats [-d] [-s NAME] shell [ENVIRONMENT [COMMAND...]] Options: -d --detach Run the resulting container in the background -s --site=NAME Specify a site to run the shell on [default: primary] ENVIRONMENT may be an environment name or a ...
def independentlinear60(display=False): old_seed = np.random.seed() np.random.seed(0) N = 1000 M = 60 beta = np.zeros(M) beta[0:30:3] = 1 f = lambda X: np.matmul(X, beta) X_start = np.random.randn(N, M) X = X_start - X_start.mean(0) y = f(X) + np.random.randn(N) * 1e-2 np.ran...
A simulated dataset with tight correlations among distinct groups of features.
def _set_default_serializer(self, name): try: (self._default_content_type, self._default_content_encoding, self._default_encode) = self._encoders[name] except KeyError: raise SerializerNotInstalled( "No encoder installed for %s" % name)
Set the default serialization method used by this library. :param name: The name of the registered serialization method. For example, ``json`` (default), ``pickle``, ``yaml``, or any custom methods registered using :meth:`register`. :raises SerializerNotInstalled: If the serial...
def page_view(url): def decorator(func): @wraps(func) async def wrapper(self: BaseState, *args, **kwargs): user_id = self.request.user.id try: user_lang = await self.request.user.get_locale() except NotImplementedError: user_lang = ...
Page view decorator. Put this around a state handler function in order to log a page view each time the handler gets called. :param url: pseudo-URL that you want to give to the state
def __symlink_dir(self, dir_name, name, path): target_dir = os.path.join(self.root_dir, dir_name) if not os.path.exists(target_dir): os.makedirs(target_dir) target_path = os.path.join(self.root_dir, dir_name, name) logger.debug("Attempting to symlink %s to %s..." % (path, tar...
Symlink an object at path to name in the dir_name folder. Remove it if it already exists.
def do_help(self, arg): if not arg or arg not in self.argparse_names(): cmd.Cmd.do_help(self, arg) else: try: self.argparser.parse_args([arg, '--help']) except Exception: pass
Patched to show help for argparse commands
def is_integer(obj): if PYTHON3: return isinstance(obj, int) return isinstance(obj, (int, long))
Is this an integer. :param object obj: :return:
def __get_connection_SNS(): region = get_global_option('region') try: if (get_global_option('aws_access_key_id') and get_global_option('aws_secret_access_key')): logger.debug( 'Authenticating to SNS using ' 'credentials in configuration file') ...
Ensure connection to SNS
def ami_lookup(region='us-east-1', name='tomcat8'): if AMI_JSON_URL: ami_dict = _get_ami_dict(AMI_JSON_URL) ami_id = ami_dict[region][name] elif GITLAB_TOKEN: warn_user('Use AMI_JSON_URL feature instead.') ami_contents = _get_ami_file(region=region) ami_dict = json.loads(...
Look up AMI ID. Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided, _name_ is returned as the ami id. Args: region (str): AWS Region to find AMI ID. name (str): Simple AMI base name to lookup. Returns: str: AMI ID for _name_ in _region_.
def width(self): if self._width is not None: return self._width self._width = sum(fs.width for fs in self.chunks) return self._width
The number of columns it would take to display this string