code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _checkMetadata(self, variantFile): metadata = self._getMetadataFromVcf(variantFile) if self._metadata is not None and self._metadata != metadata: raise exceptions.InconsistentMetaDataException( variantFile.filename)
Checks that metadata is consistent
def delete(self, id): id = self.as_id(id) response = self.http.delete( '%s/%s' % (self.api_url, id), auth=self.auth) response.raise_for_status()
Delete a component by id
def delete(self): return self.bucket.delete_key(self.name, version_id=self.version_id)
Delete this key from S3
def from_timestamp(timestamp, tz_offset): utc_dt = datetime.fromtimestamp(timestamp, utc) try: local_dt = utc_dt.astimezone(tzoffset(tz_offset)) return local_dt except ValueError: return utc_dt
Converts a timestamp + tz_offset into an aware datetime instance.
def module_is_imported(modname, scope=None): if not module_is_in_cache(modname): return False if scope is None: scope = inspect.stack()[1][0].f_globals for m in scope.values(): if isinstance(m, type(sys)) and m.__name__ == modname: return True return False
Checks if a module is imported within the current namespace.
def _open_dataset(self, urlpath): import dask.dataframe if self.pattern is None: self._dataframe = dask.dataframe.read_csv( urlpath, storage_options=self._storage_options, **self._csv_kwargs) return if not (DASK_VERSION >= '0.19.0'): raise ValueError("Your version of dask is '{}'. " "The ability to include filenames in read_csv output " "(``include_path_column``) was added in 0.19.0, so " "pattern urlpaths are not supported.".format(DASK_VERSION)) drop_path_column = 'include_path_column' not in self._csv_kwargs path_column = self._path_column() self._dataframe = dask.dataframe.read_csv( urlpath, storage_options=self._storage_options, **self._csv_kwargs) self._set_pattern_columns(path_column) if drop_path_column: self._dataframe = self._dataframe.drop([path_column], axis=1)
Open dataset using dask and use pattern fields to set new columns
def _link_to(self, linked_picker): yformat = self.config['options']['format'].replace('-01-01', '-12-31') self.config['options']['format'] = yformat
Customize the options when linked with other date-time input
def _add_auth_headers(self, base): if 'access_token' in self.creds: return _extend(base, { 'authorization': 'Bearer ' + self.creds['access_token'] }) return base
Attach the access_token to a request.
def savecache(apicache, json_file): if apicache is None or apicache is {}: return "" apicachestr = json.dumps(apicache, indent=2) with open(json_file, 'w') as cache_file: cache_file.write(apicachestr) return apicachestr
Saves apicache dictionary as json_file, returns dictionary as indented str
def package_remove(name): cmd = 'pkg_remove ' + name out, err = DETAILS['server'].sendline(cmd) return parse(out)
Remove a "package" on the ssh server
def nbins(self, axis=0, overflow=False): if axis == 0: nbins = self.GetNbinsX() elif axis == 1: nbins = self.GetNbinsY() elif axis == 2: nbins = self.GetNbinsZ() else: raise ValueError("axis must be 0, 1, or 2") if overflow: nbins += 2 return nbins
Get the number of bins along an axis
def _get_section(self, event): sentence_id = event.get('sentence') section = None if sentence_id: qstr = "$.sentences.frames[(@.frame_id is \'%s\')]" % sentence_id res = self.tree.execute(qstr) if res: sentence_frame = list(res)[0] passage_id = sentence_frame.get('passage') if passage_id: qstr = "$.sentences.frames[(@.frame_id is \'%s\')]" % \ passage_id res = self.tree.execute(qstr) if res: passage_frame = list(res)[0] section = passage_frame.get('section-id') if section in self._section_list: return section elif section.startswith('fig'): return 'figure' elif section.startswith('supm'): return 'supplementary' elif section == 'article-title': return 'title' elif section in ['subjects|methods', 'methods|subjects']: return 'methods' elif section == 'conclusions': return 'conclusion' elif section == 'intro': return 'introduction' else: return None
Get the section of the paper that the event is from.
def _directory_prefix(self): if self.downder == "wget": self.dir_prefix = "--directory-prefix=" elif self.downder == "aria2c": self.dir_prefix = "--dir="
Downloader options for specific directory
def prepare(cls, options, round_manager): super(NodeResolve, cls).prepare(options, round_manager) for resolver in cls._resolver_by_type.values(): resolver.prepare(options, round_manager)
Allow each resolver to declare additional product requirements.
def report_missing_dependencies(self): missing_deps = dependencies.missing_dependencies() if missing_deps: QMessageBox.critical(self, _('Error'), _("<b>You have missing dependencies!</b>" "<br><br><tt>%s</tt><br><br>" "<b>Please install them to avoid this message.</b>" "<br><br>" "<i>Note</i>: Spyder could work without some of these " "dependencies, however to have a smooth experience when " "using Spyder we <i>strongly</i> recommend you to install " "all the listed missing dependencies.<br><br>" "Failing to install these dependencies might result in bugs. " "Please be sure that any found bugs are not the direct " "result of missing dependencies, prior to reporting a new " "issue." ) % missing_deps, QMessageBox.Ok)
Show a QMessageBox with a list of missing hard dependencies
def ordered(self): active, inactive = self.active_inactive order = active + inactive return UnitCell(self.matrix[:,order], self.active[order])
An equivalent unit cell with the active cell vectors coming first
def stop_listening(self): self._halt_threads = True for name, queue_waker in self.recieved_signals.items(): q, wake_event = queue_waker wake_event.set()
Stop listener threads for acquisition queues
def write_hier(self, GO_id, out=sys.stdout, len_dash=1, max_depth=None, num_child=None, short_prt=False, include_only=None, go_marks=None): gos_printed = set() self[GO_id].write_hier_rec(gos_printed, out, len_dash, max_depth, num_child, short_prt, include_only, go_marks)
Write hierarchy for a GO Term.
def distance(self, other): return distance((self.separation, self.pa), (other.separation, other.pa))
Coordinate distance from another ObsNode
def create_new_csv(samples, args): out_fn = os.path.splitext(args.csv)[0] + "-merged.csv" logger.info("Preparing new csv: %s" % out_fn) with file_transaction(out_fn) as tx_out: with open(tx_out, 'w') as handle: handle.write(_header(args.csv)) for s in samples: sample_name = s['name'] if isinstance(s['out_file'], list) else os.path.basename(s['out_file']) handle.write("%s,%s,%s\n" % (sample_name, s['name'], ",".join(s['anno'])))
Create csv file that can be used with bcbio -w template
def check_can_approve(self, request, application, roles): try: authorised_persons = self.get_authorised_persons(application) authorised_persons.get(pk=request.user.pk) return True except Person.DoesNotExist: return False
Check the person's authorization.
def add_to_path(p): if p not in os.environ["PATH"]: os.environ["PATH"] = "{0}{1}{2}".format(p, os.pathsep, os.environ["PATH"])
Adds a given path to the PATH.
def insert_empty_rows(self, y: int, amount: int = 1) -> None: def transform_rows( column: Union[int, float], row: Union[int, float] ) -> Tuple[Union[int, float], Union[int, float]]: return column, row + (amount if row >= y else 0) self._transform_coordinates(transform_rows)
Insert a number of rows after the given row.
def x_build_timestamp( self, node ): self.timestamps.append(self.get_data(node).strip()) return None
The time-stamp goes to the corresponding attribute in the result.
def strict_defaults(fn): @wraps(fn) def wrapper(*args, **kwargs): defaults = _get_default_args(fn) needed_types={ key:type(defaults[key]) for key in defaults } arg_names=_get_arg_names(fn) assert not len(arg_names) - len(fn.__defaults__), '{} needs default variables on all arguments'.format(fn.__name__) for i in range(len(args)): if args[i] not in kwargs.keys(): kwargs[arg_names[i]]=args[i] for name in needed_types: assert isinstance(kwargs[name],needed_types[name]), 'got {} and expected a {}'.format(kwargs[name],needed_types[name]) return fn(**kwargs) return wrapper
use this decorator to enforce type checking on functions based on the function's defaults
def show_all(self, as_string=True): result = [] for item in self.container: pattern = str(item[0])[10:] if PY3 else item[0].pattern instances = item[2] or [] value = ( '%s "%s"' % (item[1].__name__, (item[1].__doc__ or "")) if callable(item[1]) else str(item[1]) ) value = "%s %s" % (type(item[1]), value) result.append(" => ".join((pattern, ",".join(instances), value))) return "\n".join(result) if as_string else result
Show all registered items; python2 will not show flags
def create_html(api_key, attrs): gif = get_gif(api_key, attrs['gif_id']) if 'alt' not in attrs.keys(): attrs['alt'] = 'source: {}'.format(gif['data']['source']) html_out = '<a href="{}">'.format(gif['data']['url']) html_out += '<img src="{}" alt="{}">'.format( gif['data']['images']['original']['url'], attrs['alt']) html_out += '</a>' return html_out
Returns complete html tag string.
def to_aws_format(tags): if TAG_RAY_NODE_NAME in tags: tags["Name"] = tags[TAG_RAY_NODE_NAME] del tags[TAG_RAY_NODE_NAME] return tags
Convert the Ray node name tag to the AWS-specific 'Name' tag.
def dump_yaml(data, Dumper=_Dumper, default_flow_style=False): content = yaml.dump(data, default_flow_style=default_flow_style, Dumper=Dumper) return content.strip()
Returns data as yaml-formatted string.
def _edits1(word: str) -> Set[str]: splits = [(word[:i], word[i:]) for i in range(len(word) + 1)] deletes = [L + R[1:] for L, R in splits if R] transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R) > 1] replaces = [L + c + R[1:] for L, R in splits if R for c in thai_letters] inserts = [L + c + R for L, R in splits for c in thai_letters] return set(deletes + transposes + replaces + inserts)
Return a set of words with edit distance of 1 from the input word
def commit_update(): unused = _find_unused_partition() new = _switch_partition() if new != unused: msg = f"Bad switch: switched to {new} when {unused} was unused" LOG.error(msg) raise RuntimeError(msg) else: LOG.info(f'commit_update: committed to booting {new}')
Switch the target boot partition.
def _hmac_auth(self): return TcExHmacAuth(self.args.api_access_id, self.args.api_secret_key, self.tcex.log)
Add ThreatConnect HMAC Auth to Session.
def connect(self, (host, port)): super(GeventTransport, self).connect((host, port), klass=socket.socket)
Connect using a host,port tuple
def make(collector, image, **kwargs): tag = kwargs.get("artifact", NotSpecified) if tag is NotSpecified: tag = collector.configuration["harpoon"].tag if tag is not NotSpecified: image.tag = tag Builder().make_image(image, collector.configuration["images"]) print("Created image {0}".format(image.image_name))
Just create an image
def isInstalledBuild(self): sentinelFile = os.path.join(self.getEngineRoot(), 'Engine', 'Build', 'InstalledBuild.txt') return os.path.exists(sentinelFile)
Determines if the Engine is an Installed Build
def _unpack_v1(h5file): points = np.array(h5file['points']) groups = np.array(h5file['structure']) return points, groups
Unpack groups from HDF5 v1 file
def ensure_dtype(core, dtype, dtype_): core = core.copy() if dtype is None: dtype = dtype_ if dtype_ == dtype: return core, dtype for key, val in { int: chaospy.poly.typing.asint, float: chaospy.poly.typing.asfloat, np.float32: chaospy.poly.typing.asfloat, np.float64: chaospy.poly.typing.asfloat, }.items(): if dtype == key: converter = val break else: raise ValueError("dtype not recognised (%s)" % str(dtype)) for key, val in core.items(): core[key] = converter(val) return core, dtype
Ensure dtype is correct.
def __track_job(self): while not self.__verify_job_has_started(): time.sleep(self.__POLL_TIME) self.__logger.debug("Waiting for Kubernetes job " + self.uu_name + " to start") self.__print_kubectl_hints() status = self.__get_job_status() while status == "RUNNING": self.__logger.debug("Kubernetes job " + self.uu_name + " is running") time.sleep(self.__POLL_TIME) status = self.__get_job_status() assert status != "FAILED", "Kubernetes job " + self.uu_name + " failed" self.__logger.info("Kubernetes job " + self.uu_name + " succeeded") self.signal_complete()
Poll job status while active
def _get_description(self, args: Tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]: return { 'id': uuid1().hex, 'args': args, 'kwargs': kwargs, 'module': self._module_name, 'function': self.f.__name__, 'sender_hostname': socket.gethostname(), 'sender_pid': os.getpid(), 'sender_cmd': ' '.join(sys.argv), 'sender_timestamp': datetime.utcnow().isoformat()[:19], }
Return the dictionary to be sent to the queue.
def _linux_iqn(): ret = [] initiator = '/etc/iscsi/initiatorname.iscsi' try: with salt.utils.files.fopen(initiator, 'r') as _iscsi: for line in _iscsi: line = line.strip() if line.startswith('InitiatorName='): ret.append(line.split('=', 1)[1]) except IOError as ex: if ex.errno != errno.ENOENT: log.debug("Error while accessing '%s': %s", initiator, ex) return ret
Return iSCSI IQN from a Linux host.
def _GetRunFlowFlags(args=None): import argparse parser = argparse.ArgumentParser(parents=[tools.argparser]) flags, _ = parser.parse_known_args(args=args) if hasattr(FLAGS, 'auth_host_name'): flags.auth_host_name = FLAGS.auth_host_name if hasattr(FLAGS, 'auth_host_port'): flags.auth_host_port = FLAGS.auth_host_port if hasattr(FLAGS, 'auth_local_webserver'): flags.noauth_local_webserver = (not FLAGS.auth_local_webserver) return flags
Retrieves command line flags based on gflags module.
def _color(self, color, msg): if self.useColor: return '{0}{1}{2}'.format(color, msg, self.RESET_SEQ) else: return msg
Converts a message to be printed to the user's terminal in the given color
def slots(self): if self.implied: return () data = clips.data.DataObject(self._env) lib.EnvDeftemplateSlotNames(self._env, self._tpl, data.byref) return tuple( TemplateSlot(self._env, self._tpl, n.encode()) for n in data.value)
Iterate over the Slots of the Template.
def load_data(self, filename): with zopen(filename, "rt") as f: self._data = json.load(f, cls=MontyDecoder)
Load assimilated data from a file
def _deprecation_notice(cls): _deprecation_msg = ( '{name} {type} is deprecated. ' 'It will be removed in the next version. ' 'Use saml2.cryptography.symmetric instead.' ).format(name=cls.__name__, type=type(cls).__name__) _warnings.warn(_deprecation_msg, DeprecationWarning)
Warn about deprecation of this class.
def namedb_get_all_importing_namespace_hashes( self, current_block ): query = "SELECT preorder_hash FROM namespaces WHERE (op = ? AND reveal_block < ?) OR (op = ? AND block_number < ?);" args = (NAMESPACE_REVEAL, current_block + NAMESPACE_REVEAL_EXPIRE, NAMESPACE_PREORDER, current_block + NAMESPACE_PREORDER_EXPIRE ) namespace_rows = namedb_query_execute( cur, query, args ) ret = [] for namespace_row in namespace_rows: ret.append( namespace_row['preorder_hash'] ) return ret
Get the list of all non-expired preordered and revealed namespace hashes.
def canonical_clamav_conf (): if os.name == 'posix': clamavconf = "/etc/clamav/clamd.conf" elif os.name == 'nt': clamavconf = r"c:\clamav-devel\etc\clamd.conf" else: clamavconf = "clamd.conf" return clamavconf
Default clamav configs for various platforms.
def today(year=None): return datetime.date(int(year), _date.month, _date.day) if year else _date
this day in the given year, or today's date if no year is given
def main(options): client = Client(server=options.server, username=options.username, password=options.password) print('Successfully connected to %s' % client.server) print(client.si.CurrentTime()) client.logout()
A simple connection test to login and print the server time.
def __parse_precipfc_data(data, timeframe): result = {AVERAGE: None, TOTAL: None, TIMEFRAME: None} log.debug("Precipitation data: %s", data) lines = data.splitlines() index = 1 totalrain = 0 numberoflines = 0 nrlines = min(len(lines), round(float(timeframe) / 5) + 1) while index < nrlines: line = lines[index] log.debug("__parse_precipfc_data: line: %s", line) (val, key) = line.split("|") mmu = 10**(float((int(val) - 109)) / 32) totalrain = totalrain + float(mmu) numberoflines = numberoflines + 1 index += 1 if numberoflines > 0: result[AVERAGE] = round((totalrain / numberoflines), 2) else: result[AVERAGE] = 0 result[TOTAL] = round(totalrain / 12, 2) result[TIMEFRAME] = timeframe return result
Parse the forecasted precipitation data.
def types(self): res = [] for column in self.column_definitions: tmp = column.get('type', None) res.append(ModelCompiler.get_column_type(tmp)) if tmp else False res = list(set(res)) return res
All the unique types found in user supplied model
def _create_base_ensemble(self, out, n_estimators, n_folds): ensemble_scores = numpy.empty((n_estimators, n_folds)) base_ensemble = numpy.empty_like(ensemble_scores, dtype=numpy.object) for model, fold, score, est in out: ensemble_scores[model, fold] = score base_ensemble[model, fold] = est return ensemble_scores, base_ensemble
For each base estimator collect models trained on each fold
def configure(**kwargs): for key in kwargs: if key == 'is_logging_enabled': Event.is_logging_enabled = kwargs[key] elif key == 'collector_queue': Event.collector_queue = kwargs[key] else: Logger.get_logger(__name__).error("Unknown key %s in configure or bad type %s", key, type(kwargs[key]))
Global configuration for event handling.
def make_messages(context: Context, javascript=False, fuzzy=False): kwargs = { 'all': True, 'keep_pot': True, 'no_wrap': True, } if fuzzy: kwargs['allow_fuzzy'] = True if javascript: kwargs.update(domain='djangojs', ignore_patterns=['*.bundle.js']) with in_dir(context.app.django_app_name): return context.management_command('makemessages', **kwargs)
Collects text into translation source files
def show_message(self, message, timeout=0): self.main.statusBar().showMessage(message, timeout)
Show message in main window's status bar
def iter_points(self): "returns a list of tuples of names and values" if not self.is_discrete(): raise ValueError("Patch is not discrete") names = sorted(self.sets.keys()) icoords = [self.sets[name].iter_members() for name in names] for coordinates in product(*icoords): yield tuple(zip(names,coordinates))
returns a list of tuples of names and values
def _covar_mstep_full(gmm, X, responsibilities, weighted_X_sum, norm, min_covar): n_features = X.shape[1] cv = np.empty((gmm.n_components, n_features, n_features)) for c in range(gmm.n_components): post = responsibilities[:, c] mu = gmm.means_[c] diff = X - mu with np.errstate(under='ignore'): avg_cv = np.dot(post * diff.T, diff) / (post.sum() + 10 * EPS) cv[c] = avg_cv + min_covar * np.eye(n_features) return cv
Performing the covariance M step for full cases
def urlretrieve(url, dest, write_mode="w"): response = urllib2.urlopen(url) mkdir_recursive(os.path.dirname(dest)) with open(dest, write_mode) as f: f.write(response.read()) f.close()
save a file to disk from a given url
def _analyse_status_type(line): spaces = _count_spaces_startswith(line) if spaces is None: return '' switch = { 0: 'RESOURCE', 2: {' disk:': 'LOCALDISK', ' role:': 'PEERNODE', ' connection:': 'PEERNODE'}, 4: {' peer-disk:': 'PEERDISK'} } ret = switch.get(spaces, 'UNKNOWN') if isinstance(ret, six.text_type): return ret for x in ret: if x in line: return ret[x] return 'UNKNOWN'
Figure out the sections in drbdadm status
def assert_is_substring(substring, subject, message=None, extra=None): assert ( (subject is not None) and (substring is not None) and (subject.find(substring) != -1) ), _assert_fail_message(message, substring, subject, "is not in", extra)
Raises an AssertionError if substring is not a substring of subject.
def _open_ftp(self): _ftp = FTP() _ftp.set_debuglevel(0) with ftp_errors(self): _ftp.connect(self.host, self.port, self.timeout) _ftp.login(self.user, self.passwd, self.acct) self._features = {} try: feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1") except error_perm: self.encoding = "latin-1" else: self._features = self._parse_features(feat_response) self.encoding = "utf-8" if "UTF8" in self._features else "latin-1" if not PY2: _ftp.file = _ftp.sock.makefile( "r", encoding=self.encoding ) _ftp.encoding = self.encoding self._welcome = _ftp.welcome return _ftp
Open a new ftp object.
def _check_field_names_unique(self, existing_item, patched_item): existing_fields_by_name = {f.name: f for f in existing_item.fields} for patched_field in patched_item.fields: if patched_field.name in existing_fields_by_name.keys(): existing_field = existing_fields_by_name[patched_field.name] raise InvalidSpec('Patched field {} overrides pre-existing field in {} ({}:{}).' .format(quote(patched_field.name), quote(patched_item.name), existing_field.path, existing_field.lineno), patched_field.lineno, patched_field.path)
Enforces that patched fields don't already exist.
def _format_extname(self, ext): if ext is None: outs = ext else: outs = '{0},{1}'.format(ext[0], ext[1]) return outs
Pretty print given extension name and number tuple.
def _represent_undefined(self, data): raise RepresenterError( _format("Cannot represent an object: {0!A} of type: {1}; " "yaml_representers: {2!A}, " "yaml_multi_representers: {3!A}", data, type(data), self.yaml_representers.keys(), self.yaml_multi_representers.keys()))
Raises flag for objects that cannot be represented
def add(self, post_id): post_data = self.get_post_data() post_data['user_name'] = self.userinfo.user_name post_data['user_id'] = self.userinfo.uid post_data['post_id'] = post_id replyid = MReply.create_reply(post_data) if replyid: out_dic = {'pinglun': post_data['cnt_reply'], 'uid': replyid} logger.info('add reply result dic: {0}'.format(out_dic)) return json.dump(out_dic, self)
Adding reply to a post.
def copy_resource(self, resource, targetdir): final_path = resource.final_path() if final_path[0] == '/' or (':' in final_path) or ('?' in final_path): return source_path = self.get_source_path(resource) if resource.resource_type == DIRECTORY_RESOURCE: for file_path in glob.iglob(os.path.join(source_path, '**'), recursive=True): if os.path.isdir(file_path): continue rest_target_path = file_path[len(source_path)+1:] target_path = os.path.join(targetdir, final_path, rest_target_path) self._copy_file(file_path, target_path) else: target_path = os.path.join(targetdir, final_path) yield self._copy_file(source_path, target_path)
Copies a resource file and returns the source path for monitoring
def extract_terms(self, nb): emt = ExtractMetatabTerms() emt.preprocess(nb, {}) return emt.terms
Extract some term values, usually set with tags or metadata
def csw_global_dispatch_by_catalog(request, catalog_slug): catalog = get_object_or_404(Catalog, slug=catalog_slug) if catalog: url = settings.SITE_URL.rstrip('/') + request.path.rstrip('/') return csw_global_dispatch(request, url=url, catalog_id=catalog.id)
pycsw wrapper for catalogs
def union(self, key, *others): if not isinstance(key, str): raise ValueError("String expected.") self.db.sunionstore(key, [self.key] + [o.key for o in others]) return Set(key)
Return a new set with elements from the set and all others.
def csch(x, context=None): return _apply_function_in_current_context( BigFloat, mpfr.mpfr_csch, (BigFloat._implicit_convert(x),), context, )
Return the hyperbolic cosecant of x.
def _get_ssh_public_key(self): key = ipa_utils.generate_public_ssh_key(self.ssh_private_key_file) return '{user}:{key} {user}'.format( user=self.ssh_user, key=key.decode() )
Generate SSH public key from private key.
def access_token(self) -> str: if self._token_expires_at < time.time() + self._REFRESH_THRESHOLD: self.request_token() return self._token["access_token"]
The access token stored within the requested token.
def example_reading_spec(self): processed_reward_type = tf.float32 if self.is_processed_rewards_discrete: processed_reward_type = tf.int64 data_fields = { TIMESTEP_FIELD: tf.FixedLenFeature((1,), tf.int64), RAW_REWARD_FIELD: tf.FixedLenFeature((1,), tf.float32), PROCESSED_REWARD_FIELD: tf.FixedLenFeature((1,), processed_reward_type), DONE_FIELD: tf.FixedLenFeature((1,), tf.int64), OBSERVATION_FIELD: self.observation_spec, ACTION_FIELD: self.action_spec, } data_items_to_decoders = { field: tf.contrib.slim.tfexample_decoder.Tensor(field) for field in data_fields } return data_fields, data_items_to_decoders
Data fields to store on disk and their decoders.
def scalac_classpath_entries(self): return ScalaPlatform.global_instance().compiler_classpath_entries( self.context.products, self.context._scheduler)
Returns classpath entries for the scalac classpath.
def run(self): empty = False while not empty: try: s = self.series.get() result_dict = itunes.get_rss_feed_data_from_series(s) self.storer.store(result_dict) self.logger.info('Retrieved and stored %s', str(s.id)) except Exception as e: print e finally: self.series.task_done() empty = self.series.empty()
Run the task - compose full series + add to our results
def dump(self): return { 'title': self.title, 'issue_id': self.issue_id, 'reporter': self.reporter, 'assignee': self.assignee, 'status': self.status, 'product': self.product, 'component': self.component, 'created_at': self.created_at, 'updated_at': self.updated_at, 'closed_at': self.closed_at, 'status_code': self.status_code }
Return the object itself.
def path(self): if isinstance(self.dir, Directory): return self.dir._path elif isinstance(self.dir, ROOT.TDirectory): return self.dir.GetPath() elif isinstance(self.dir, _FolderView): return self.dir.path() else: return str(self.dir)
Get the path of the wrapped folder
def _terms(self): res = [] for sign, terms in self.terms.items(): for ID, lon in terms.items(): res.append(self.T(ID, sign)) return res
Returns a list with the objects as terms.
def __interrupt_search(self): if self.__search_worker_thread: self.__search_worker_thread.quit() self.__search_worker_thread.wait() self.__container.engine.stop_processing(warning=False)
Interrupt the current search.
def to_dict(self): targets = [] for cond in self._targets: targets.append(cond.to_dict()) if targets: return {'targets': targets} else: return {}
Save this message sending object into a dictionary.
def process_predelete(self, obj, pk_set=None, action=None, update_fields=None, **kwargs): build_kwargs = self._get_build_kwargs(obj, pk_set, action, update_fields, **kwargs) self.delete_cache.set(obj, build_kwargs)
Render the queryset of influenced objects and cache it.
def placeOrder(self, contract, order, orderId=None, account=None): self.requestOrderIds() useOrderId = self.orderId if orderId == None else orderId if account: order.m_account = account self.ibConn.placeOrder(useOrderId, contract, order) account_key = order.m_account self.orders[useOrderId] = { "id": useOrderId, "symbol": self.contractString(contract), "contract": contract, "status": "SENT", "reason": None, "avgFillPrice": 0., "parentId": 0, "time": datetime.fromtimestamp(int(self.time)), "account": None } if hasattr(order, "m_account"): self.orders[useOrderId]["account"] = order.m_account return useOrderId
Place order on IB TWS
def triggered(self) -> bool: if self._triggered.is_set(): return True return any(token.triggered for token in self._chain)
Return `True` or `False` whether this token has been triggered.
def wraplet_signature(app, what, name, obj, options, signature, return_annotation): try: wrapped = obj._raw_slave except AttributeError: return None else: slave_argspec = autodoc.getargspec(wrapped) slave_signature = autodoc.formatargspec(obj, *slave_argspec) return (slave_signature, return_annotation)
Have wraplets use the signature of the slave
def map_boto_exceptions(key=None, exc_pass=()): from boto.exception import BotoClientError, BotoServerError, \ StorageResponseError try: yield except StorageResponseError as e: if e.code == 'NoSuchKey': raise KeyError(key) raise IOError(str(e)) except (BotoClientError, BotoServerError) as e: if e.__class__.__name__ not in exc_pass: raise IOError(str(e))
Map boto-specific exceptions to the simplekv-API.
def objectMD5(obj): if hasattr(obj, 'target_name'): return obj.target_name() try: return textMD5(pickle.dumps(obj)) except: return ''
Get md5 of an object
def keys_create(cls, fqdn, flag): data = { "flags": flag, } meta = cls.get_fqdn_info(fqdn) url = meta['domain_keys_href'] ret, headers = cls.json_post(url, data=json.dumps(data), return_header=True) return cls.json_get(headers['location'])
Create new key entry for a domain.
def check_split_ratio(split_ratio): valid_ratio = 0. if isinstance(split_ratio, float): assert 0. < split_ratio < 1., ( "Split ratio {} not between 0 and 1".format(split_ratio)) test_ratio = 1. - split_ratio return (split_ratio, test_ratio, valid_ratio) elif isinstance(split_ratio, list): length = len(split_ratio) assert length == 2 or length == 3, ( "Length of split ratio list should be 2 or 3, got {}".format(split_ratio)) ratio_sum = sum(split_ratio) if not ratio_sum == 1.: split_ratio = [float(ratio) / ratio_sum for ratio in split_ratio] if length == 2: return tuple(split_ratio + [valid_ratio]) return tuple(split_ratio) else: raise ValueError('Split ratio must be float or a list, got {}' .format(type(split_ratio)))
Check that the split ratio argument is not malformed
def power_on(env, identifier): vsi = SoftLayer.VSManager(env.client) vs_id = helpers.resolve_id(vsi.resolve_ids, identifier, 'VS') env.client['Virtual_Guest'].powerOn(id=vs_id)
Power on a virtual server.
def process(self, sched, coro): super(WaitForSignal, self).process(sched, coro) waitlist = sched.sigwait[self.name] waitlist.append((self, coro)) if self.name in sched.signals: sig = sched.signals[self.name] if sig.recipients <= len(waitlist): sig.process(sched, sig.coro) del sig.coro del sched.signals[self.name]
Add the calling coro in a waiting for signal queue.
def convert(self, pattern: str) -> str: parameters = OrderedDict() for parameter in self.signature.parameters.values(): annotation = self.convert_type_to_parse_type(parameter) parameters[parameter.name] = "{%s:%s}" % (parameter.name, annotation) formatter = GoatFormatter() values = parameters.values() parameter_list = list(values) converted_pattern = formatter.vformat(pattern, parameter_list, parameters) self.context_params = formatter.unused_args return converted_pattern
Convert the goat step string to CFParse String
def remove_prefix(self, id): if 'prefix' not in request.params: abort(400, 'Missing prefix.') prefix = Prefix.get(int(request.params['prefix'])) prefix.pool = None prefix.save() redirect(url(controller = 'pool', action = 'edit', id = id))
Remove a prefix from pool 'id'.
def allow_unsigned(self, mav, msgId): if self.allow is None: self.allow = { mavutil.mavlink.MAVLINK_MSG_ID_RADIO : True, mavutil.mavlink.MAVLINK_MSG_ID_RADIO_STATUS : True } if msgId in self.allow: return True if self.settings.allow_unsigned: return True return False
see if an unsigned packet should be allowed
def output(self, name: str, value: Any): self.outputs[name] = value
Export a stack output with a given name and value.
def generate(data, dimOrder, maxWindowSize, overlapPercent, transforms = []): width = data.shape[dimOrder.index('w')] height = data.shape[dimOrder.index('h')] return generateForSize(width, height, dimOrder, maxWindowSize, overlapPercent, transforms)
Generates a set of sliding windows for the specified dataset.
def update_buttons(self): current_scheme = self.current_scheme names = self.get_option("names") try: names.pop(names.index(u'Custom')) except ValueError: pass delete_enabled = current_scheme not in names self.delete_button.setEnabled(delete_enabled) self.reset_button.setEnabled(not delete_enabled)
Updates the enable status of delete and reset buttons.
def print_docs(self): docs = {} for name, func in six.iteritems(self.minion.functions): if name not in docs: if func.__doc__: docs[name] = func.__doc__ for name in sorted(docs): if name.startswith(self.opts.get('fun', '')): salt.utils.stringutils.print_cli('{0}:\n{1}\n'.format(name, docs[name]))
Pick up the documentation for all of the modules and print it out.
def delete(self, task_id): if isinstance(task_id, RegisteredTask): task_id = task_id.id def cloud_delete(api): api.delete(task_id) if len(self._threads): self.put(cloud_delete) else: cloud_delete(self._api) return self
Deletes a task from a TaskQueue.
def _create_scaling_policies(conn, as_name, scaling_policies): 'helper function to create scaling policies' if scaling_policies: for policy in scaling_policies: policy = autoscale.policy.ScalingPolicy( name=policy["name"], as_name=as_name, adjustment_type=policy["adjustment_type"], scaling_adjustment=policy["scaling_adjustment"], min_adjustment_step=policy.get("min_adjustment_step", None), cooldown=policy["cooldown"]) conn.create_scaling_policy(policy)
helper function to create scaling policies
def depth(self, pos): parent = self.parent_position(pos) if parent is None: return 0 else: return self.depth(parent) + 1
determine depth of node at pos