| _id (string, 2–7 chars) | title (string, 1–88 chars) | partition (string, 3 classes) | text (string, 75–19.8k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q276300 | ConnectorDB.reset_apikey | test | def reset_apikey(self):
"""invalidates the device's current api key, and generates a new one. Resets current auth to use the new apikey,
since the change would have future queries fail if they use the old api key."""
apikey = Device.reset_apikey(self)
self.db.setauth(apikey)
retu... | python | {
"resource": ""
} |
q276301 | ConnectorDB.users | test | def users(self):
"""Returns the list of users in the database"""
result = self.db.read("", {"q": "ls"})
if result is None or result.json() is None:
return []
users = []
for u in result.json():
usr = self(u["name"])
usr.metadata = u
... | python | {
"resource": ""
} |
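`ConnectorDB.users` and `User.devices` (below) share one listing idiom: a CRUD read with `{"q": "ls"}`, after which each returned JSON object is wrapped in a client object with the raw JSON cached as `metadata`. A minimal standalone sketch of that idiom, assuming a plain `requests` call and a hypothetical `/crud/` endpoint layout (the real client goes through `DatabaseConnection.read`):

```python
import requests

def ls_children(base_url, path, auth=None):
    """Sketch of the '{"q": "ls"}' listing idiom used by ConnectorDB.users
    and User.devices. The /crud/ endpoint layout here is an assumption."""
    r = requests.get(base_url.rstrip("/") + "/crud/" + path,
                     params={"q": "ls"}, auth=auth)
    if r.status_code != 200:
        return []
    try:
        items = r.json()
    except ValueError:  # body was not JSON
        return []
    # Pair each child's name with its full JSON record (the 'metadata')
    return [(item["name"], item) for item in (items or [])]
```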
q276302 | run_bwa_index | test | def run_bwa_index(job, ref_id):
"""
Use BWA to create reference index files
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str ref_id: FileStoreID for the reference genome
:return: FileStoreIDs for BWA index files
:rtype: tuple(str, str, str, str, str)
"""
job.fi... | python | {
"resource": ""
} |
q276303 | Logger.connectordb | test | def connectordb(self):
"""Returns the ConnectorDB object that the logger uses. Raises an error if Logger isn't able to connect"""
if self.__cdb is None:
logging.debug("Logger: Connecting to " + self.serverurl)
self.__cdb = ConnectorDB(self.apikey, url=self.serverurl)
retu... | python | {
"resource": ""
} |
q276304 | Logger.addStream | test | def addStream(self, streamname, schema=None, **kwargs):
"""Adds the given stream to the logger. Requires an active connection to the ConnectorDB database.
If a schema is not specified, loads the stream from the database. If a schema is specified, and the stream
does not exist, creates the strea... | python | {
"resource": ""
} |
q276305 | Logger.addStream_force | test | def addStream_force(self, streamname, schema=None):
"""This function adds the given stream to the logger, but does not check with a ConnectorDB database
to make sure that the stream exists. Use at your own risk."""
c = self.database.cursor()
c.execute("INSERT OR REPLACE INTO streams VAL... | python | {
"resource": ""
} |
q276306 | Logger.insert | test | def insert(self, streamname, value):
"""Insert the datapoint into the logger for the given stream name. The logger caches the datapoint
and eventually synchronizes it with ConnectorDB"""
if streamname not in self.streams:
raise Exception("The stream '%s' was not found" % (streamname,... | python | {
"resource": ""
} |
q276307 | Logger.sync | test | def sync(self):
"""Attempt to sync with the ConnectorDB server"""
logging.debug("Logger: Syncing...")
failed = False
try:
# Get the connectordb object
cdb = self.connectordb
# Ping the database - most connection errors will happen here
cdb... | python | {
"resource": ""
} |
q276308 | Logger.start | test | def start(self):
"""Start the logger background synchronization service. This allows you to not need to
worry about syncing with ConnectorDB - you just insert into the Logger, and the Logger
will by synced every syncperiod."""
with self.synclock:
if self.syncthread is not No... | python | {
"resource": ""
} |
q276309 | Logger.stop | test | def stop(self):
"""Stops the background synchronization thread"""
with self.synclock:
if self.syncthread is not None:
self.syncthread.cancel()
self.syncthread = None | python | {
"resource": ""
} |
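`Logger.start` and `Logger.stop` manage a background thread guarded by `synclock`, and `stop` calls `cancel()`, which points to a `threading.Timer`-style rescheduling loop. A minimal sketch of that pattern under those assumptions, with the actual sync work stubbed out:

```python
import threading

class PeriodicSyncer(object):
    """Sketch of the Timer-based background sync implied by Logger.start/stop."""

    def __init__(self, syncperiod=10.0):
        self.syncperiod = syncperiod
        self.synclock = threading.Lock()
        self.syncthread = None

    def sync(self):
        print("syncing...")  # stand-in for the real Logger.sync

    def __run(self):
        self.sync()
        with self.synclock:
            if self.syncthread is not None:  # only reschedule if not stopped
                self.syncthread = threading.Timer(self.syncperiod, self.__run)
                self.syncthread.start()

    def start(self):
        with self.synclock:
            if self.syncthread is not None:
                return  # already running
            self.syncthread = threading.Timer(self.syncperiod, self.__run)
            self.syncthread.start()

    def stop(self):
        with self.synclock:
            if self.syncthread is not None:
                self.syncthread.cancel()
                self.syncthread = None
```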
q276310 | download_url_job | test | def download_url_job(job, url, name=None, s3_key_path=None, cghub_key_path=None):
"""Job version of `download_url`"""
work_dir = job.fileStore.getLocalTempDir()
fpath = download_url(job=job, url=url, work_dir=work_dir, name=name,
s3_key_path=s3_key_path, cghub_key_path=cghub_key_pat... | python | {
"resource": ""
} |
q276311 | s3am_upload_job | test | def s3am_upload_job(job, file_id, file_name, s3_dir, s3_key_path=None):
"""Job version of s3am_upload"""
work_dir = job.fileStore.getLocalTempDir()
fpath = job.fileStore.readGlobalFile(file_id, os.path.join(work_dir, file_name))
s3am_upload(job=job, fpath=fpath, s3_dir=s3_dir, num_cores=job.cores, s3_ke... | python | {
"resource": ""
} |
q276312 | labels | test | def labels(ontology, output, ols_base):
"""Output the names to the given file"""
for label in get_labels(ontology=ontology, ols_base=ols_base):
click.echo(label, file=output) | python | {
"resource": ""
} |
q276313 | tree | test | def tree(ontology, output, ols_base):
"""Output the parent-child relations to the given file"""
for parent, child in get_hierarchy(ontology=ontology, ols_base=ols_base):
click.echo('{}\t{}'.format(parent, child), file=output) | python | {
"resource": ""
} |
q276314 | get_mean_insert_size | test | def get_mean_insert_size(work_dir, bam_name):
"""Function taken from MC3 Pipeline"""
cmd = "docker run --log-driver=none --rm -v {}:/data quay.io/ucsc_cgl/samtools " \
"view -f66 {}".format(work_dir, os.path.join(work_dir, bam_name))
process = subprocess.Popen(args=cmd, shell=True, stdout=subproce... | python | {
"resource": ""
} |
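The visible part of `get_mean_insert_size` shells out to `samtools view -f66`, which emits only properly-paired, first-in-pair reads; the insert size is column 9 (TLEN) of each SAM record. A sketch of the elided averaging step, assuming a plain mean of absolute TLEN values (the MC3 original may filter outliers):

```python
import subprocess

def mean_insert_size_from_sam(cmd):
    """Average |TLEN| over records printed by `samtools view -f66 ...`.

    `cmd` is the docker/samtools command line from the snippet above;
    the plain-mean aggregation here is an assumption.
    """
    process = subprocess.Popen(args=cmd, shell=True, stdout=subprocess.PIPE)
    total, count = 0, 0
    for line in process.stdout:
        fields = line.decode().split("\t")
        tlen = abs(int(fields[8]))  # SAM column 9: observed template length
        if tlen > 0:
            total += tlen
            count += 1
    process.wait()
    return float(total) / count if count else 0.0
```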
q276315 | current_docker_container_id | test | def current_docker_container_id():
"""
Returns a string that represents the container ID of the current Docker container. If this
function is invoked outside of a container a NotInsideContainerError is raised.
>>> import subprocess
>>> import sys
>>> a = subprocess.check_output(['docker', 'run'... | python | {
"resource": ""
} |
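A common way to recover the current container ID from inside a container is to parse `/proc/self/cgroup`, whose entries end in the 64-hex-character container ID on classic Docker setups. A hedged sketch (the actual implementation here is elided, and cgroup layouts vary across Docker and cgroup versions):

```python
import re

class NotInsideContainerError(RuntimeError):
    pass

def current_docker_container_id(cgroup_path='/proc/self/cgroup'):
    """Sketch: extract a 64-hex container ID from the cgroup file.

    Works for classic cgroup v1 Docker layouts; other layouts may differ.
    """
    pattern = re.compile(r'([0-9a-f]{64})')
    with open(cgroup_path) as f:
        for line in f:
            match = pattern.search(line)
            if match:
                return match.group(1)
    raise NotInsideContainerError('no container ID found in %s' % cgroup_path)
```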
q276316 | run_star | test | def run_star(job, r1_id, r2_id, star_index_url, wiggle=False, sort=True):
"""
Performs alignment of FASTQs to BAM via STAR.
The --limitBAMsortRAM step was added to deal with memory explosion when sorting certain samples.
The value was chosen to complement the recommended amount of memory to have when running ST... | python | {
"resource": ""
} |
q276317 | Stream.create | test | def create(self, schema="{}", **kwargs):
"""Creates a stream given an optional JSON schema encoded as a python dict. You can also add other properties
of the stream, such as the icon, datatype or description. Create accepts both a string schema and
a dict-encoded schema."""
if isinstance... | python | {
"resource": ""
} |
q276318 | Stream.export | test | def export(self, directory):
"""Exports the stream to the given directory. The directory can't exist.
You can later import this device by running import_stream on a device.
"""
if os.path.exists(directory):
raise FileExistsError(
"The stream export directory ... | python | {
"resource": ""
} |
q276319 | Stream.device | test | def device(self):
"""returns the device which owns the given stream"""
splitted_path = self.path.split("/")
return Device(self.db,
splitted_path[0] + "/" + splitted_path[1]) | python | {
"resource": ""
} |
q276320 | get_labels | test | def get_labels(ontology, ols_base=None):
"""Iterates over the labels of terms in the ontology
:param str ontology: The name of the ontology
:param str ols_base: An optional, custom OLS base url
:rtype: iter[str]
"""
client = OlsClient(ols_base=ols_base)
return client.iter_labels(ontology) | python | {
"resource": ""
} |
q276321 | get_hierarchy | test | def get_hierarchy(ontology, ols_base=None):
"""Iterates over the parent-child relationships in an ontolog
:param str ontology: The name of the ontology
:param str ols_base: An optional, custom OLS base url
:rtype: iter[tuple[str,str]]
"""
client = OlsClient(ols_base=ols_base)
return client.... | python | {
"resource": ""
} |
q276322 | AbstractPipelineWrapper.run | test | def run(cls, name, desc):
"""
Prepares and runs the pipeline. Note that this method must be invoked from inside a
Docker container, and only while the Docker daemon is reachable.
:param str name: The name of the command to start the workflow.
:param str desc: The description of the wo... | python | {
"resource": ""
} |
q276323 | AbstractPipelineWrapper.__populate_parser_from_config | test | def __populate_parser_from_config(self, arg_parser, config_data, prefix=''):
"""
Populates an ArgumentParser object with arguments where each argument is a key from the
given config_data dictionary.
:param str prefix: Prepends the key with this prefix delimited by a single '.' character... | python | {
"resource": ""
} |
q276324 | AbstractPipelineWrapper.__get_empty_config | test | def __get_empty_config(self):
"""
Returns the config file contents as a string. The config file is generated and then deleted.
"""
self._generate_config()
path = self._get_config_path()
with open(path, 'r') as readable:
contents = readable.read()
os.re... | python | {
"resource": ""
} |
q276325 | AbstractPipelineWrapper._get_mount_path | test | def _get_mount_path(self):
"""
Returns the path of the mount point of the current container. If this method is invoked
outside of a Docker container a NotInsideContainerError is raised. Likewise if the docker
daemon is unreachable from inside the container a UserError is raised. This met... | python | {
"resource": ""
} |
q276326 | AbstractPipelineWrapper._add_option | test | def _add_option(self, arg_parser, name, *args, **kwargs):
"""
Add an argument to the given arg_parser with the given name.
:param argparse.ArgumentParser arg_parser:
:param str name: The name of the option.
"""
arg_parser.add_argument('--' + name, *args, **kwargs) | python | {
"resource": ""
} |
q276327 | AbstractPipelineWrapper._create_argument_parser | test | def _create_argument_parser(self):
"""
Creates and returns an ArgumentParser object prepopulated with 'no clean', 'cores' and
'restart' arguments.
"""
parser = argparse.ArgumentParser(description=self._desc,
formatter_class=argparse.RawTex... | python | {
"resource": ""
} |
q276328 | AbstractPipelineWrapper._create_pipeline_command | test | def _create_pipeline_command(self, args, workdir_path, config_path):
"""
Creates and returns a list that represents a command for running the pipeline.
"""
return ([self._name, 'run', os.path.join(workdir_path, 'jobStore'),
'--config', config_path,
'--wo... | python | {
"resource": ""
} |
q276329 | DatabaseConnection.setauth | test | def setauth(self, user_or_apikey=None, user_password=None):
""" setauth sets the authentication header for use in the session.
It is for use when apikey is updated or something of the sort, such that
there is a seamless experience. """
auth = None
if user_or_apikey is not None:
... | python | {
"resource": ""
} |
q276330 | DatabaseConnection.handleresult | test | def handleresult(self, r):
"""Handles HTTP error codes for the given request
Raises:
AuthenticationError on the appropriate 4** errors
ServerError if the response is not an ok (2**)
Arguments:
r -- The request result
"""
if r.status_code >= 4... | python | {
"resource": ""
} |
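The `handleresult` docstring fully specifies the contract: raise `AuthenticationError` for the auth-related 4xx codes, raise `ServerError` for anything else outside 2xx, and otherwise return the response. A minimal sketch of that contract against a `requests` response (the exception classes here are stand-ins for the library's own):

```python
class AuthenticationError(Exception):
    pass

class ServerError(Exception):
    pass

def handleresult(r):
    """Sketch of the documented contract of DatabaseConnection.handleresult."""
    if r.status_code in (401, 403):        # auth-related 4xx errors
        raise AuthenticationError(r.text)
    if not (200 <= r.status_code < 300):   # any other non-2xx response
        raise ServerError("%d: %s" % (r.status_code, r.text))
    return r
```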
q276331 | DatabaseConnection.ping | test | def ping(self):
"""Attempts to ping the server using current credentials, and responds with the path of the currently
authenticated device"""
return self.handleresult(self.r.get(self.url,
params={"q": "this"})).text | python | {
"resource": ""
} |
q276332 | DatabaseConnection.create | test | def create(self, path, data=None):
"""Send a POST CRUD API request to the given path using the given data which will be converted
to json"""
return self.handleresult(self.r.post(urljoin(self.url + CRUD_PATH,
path),
... | python | {
"resource": ""
} |
q276333 | DatabaseConnection.update | test | def update(self, path, data=None):
"""Send an update request to the given path of the CRUD API, with the given data dict, which will be converted
into json"""
return self.handleresult(self.r.put(urljoin(self.url + CRUD_PATH,
path),
... | python | {
"resource": ""
} |
q276334 | DatabaseConnection.delete | test | def delete(self, path):
"""Send a delete request to the given path of the CRUD API. This deletes the object. Or at least tries to."""
return self.handleresult(self.r.delete(urljoin(self.url + CRUD_PATH,
path))) | python | {
"resource": ""
} |
q276335 | DatabaseConnection.subscribe | test | def subscribe(self, stream, callback, transform=""):
"""Subscribe to the given stream with the callback"""
return self.ws.subscribe(stream, callback, transform) | python | {
"resource": ""
} |
q276336 | User.create | test | def create(self, email, password, role="user", public=True, **kwargs):
"""Creates the given user - using the passed in email and password.
You can also set other default properties by passing in the relevant information::
usr.create("my@email","mypass",description="I like trains.")
... | python | {
"resource": ""
} |
q276337 | User.devices | test | def devices(self):
"""Returns the list of devices that belong to the user"""
result = self.db.read(self.path, {"q": "ls"})
if result is None or result.json() is None:
return []
devices = []
for d in result.json():
dev = self[d["name"]]
dev.met... | python | {
"resource": ""
} |
q276338 | run_cutadapt | test | def run_cutadapt(job, r1_id, r2_id, fwd_3pr_adapter, rev_3pr_adapter):
"""
Adapter trimming for RNA-seq data
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str r1_id: FileStoreID of fastq read 1
:param str r2_id: FileStoreID of fastq read 2 (if paired data)
:param str fw... | python | {
"resource": ""
} |
q276339 | run_samtools_faidx | test | def run_samtools_faidx(job, ref_id):
"""
Use SAMtools to create reference index file
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str ref_id: FileStoreID for the reference genome
:return: FileStoreID for reference index
:rtype: str
"""
job.fileStore.logToMaster... | python | {
"resource": ""
} |
q276340 | run_samtools_index | test | def run_samtools_index(job, bam):
"""
Runs SAMtools index to create a BAM index file
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str bam: FileStoreID of the BAM file
:return: FileStoreID for BAM index file
:rtype: str
"""
work_dir = job.fileStore.getLocalTempD... | python | {
"resource": ""
} |
q276341 | run_sambamba_markdup | test | def run_sambamba_markdup(job, bam):
"""
Marks reads as PCR duplicates using Sambamba
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str bam: FileStoreID for BAM file
:return: FileStoreID for sorted BAM file
:rtype: str
"""
work_dir = job.fileStore.getLocalTempDir... | python | {
"resource": ""
} |
q276342 | run_samblaster | test | def run_samblaster(job, sam):
"""
Marks reads as PCR duplicates using SAMBLASTER
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str sam: FileStoreID for SAM file
:return: FileStoreID for deduped SAM file
:rtype: str
"""
work_dir = job.fileStore.getLocalTempDir()
... | python | {
"resource": ""
} |
q276343 | picard_mark_duplicates | test | def picard_mark_duplicates(job, bam, bai, validation_stringency='LENIENT'):
"""
Runs Picard MarkDuplicates on a BAM file. Requires that the BAM file be coordinate sorted.
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str bam: FileStoreID for BAM file
:param str bai: FileSto... | python | {
"resource": ""
} |
q276344 | run_picard_sort | test | def run_picard_sort(job, bam, sort_by_name=False):
"""
Sorts BAM file using Picard SortSam
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str bam: FileStoreID for BAM file
:param boolean sort_by_name: If true, sorts by read name instead of coordinate.
:return: FileStoreI... | python | {
"resource": ""
} |
q276345 | run_base_recalibration | test | def run_base_recalibration(job, bam, bai, ref, ref_dict, fai, dbsnp, mills, unsafe=False):
"""
Creates recalibration table for Base Quality Score Recalibration
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str bam: FileStoreID for BAM file
:param str bai: FileStoreID for BA... | python | {
"resource": ""
} |
q276346 | run_kallisto | test | def run_kallisto(job, r1_id, r2_id, kallisto_index_url):
"""
RNA quantification via Kallisto
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str r1_id: FileStoreID of fastq (pair 1)
:param str r2_id: FileStoreID of fastq (pair 2 if applicable, otherwise pass None for single-e... | python | {
"resource": ""
} |
q276347 | run_rsem | test | def run_rsem(job, bam_id, rsem_ref_url, paired=True):
"""
RNA quantification with RSEM
:param JobFunctionWrappingJob job: Passed automatically by Toil
:param str bam_id: FileStoreID of transcriptome bam for quantification
:param str rsem_ref_url: URL of RSEM reference (tarball)
:param bool pair... | python | {
"resource": ""
} |
q276348 | SARPlus.get_user_affinity | test | def get_user_affinity(self, test):
"""Prepare test set for C++ SAR prediction code.
Find all items the test users have seen in the past.
Arguments:
test (pySpark.DataFrame): input dataframe which contains test users.
"""
test.createOrReplaceTempView(self.f("{prefix}d... | python | {
"resource": ""
} |
q276349 | WebsocketHandler.send | test | def send(self, cmd):
"""Send the given command thru the websocket"""
with self.ws_sendlock:
self.ws.send(json.dumps(cmd)) | python | {
"resource": ""
} |
q276350 | WebsocketHandler.subscribe | test | def subscribe(self, stream, callback, transform=""):
"""Given a stream, a callback and an optional transform, sets up the subscription"""
if self.status in ("disconnected", "disconnecting", "connecting"):
self.connect()
if self.status != "connected... | python | {
"resource": ""
} |
q276351 | WebsocketHandler.connect | test | def connect(self):
"""Attempt to connect to the websocket - and returns either True or False depending on if
the connection was successful or not"""
# Wait for the lock to be available (ie, the websocket is not being used (yet))
self.ws_openlock.acquire()
self.ws_openlock.releas... | python | {
"resource": ""
} |
q276352 | WebsocketHandler.__reconnect | test | def __reconnect(self):
"""This is called when a connection is lost - it attempts to reconnect to the server"""
self.status = "reconnecting"
# Reset the disconnect time after 15 minutes
if self.disconnected_time - self.connected_time > 15 * 60:
self.reconnect_time = self.reco... | python | {
"resource": ""
} |
q276353 | WebsocketHandler.__resubscribe | test | def __resubscribe(self):
"""Send subscribe command for all existing subscriptions. This allows to resume a connection
that was closed"""
with self.subscription_lock:
for sub in self.subscriptions:
logging.debug("Resubscribing to %s", sub)
stream_transf... | python | {
"resource": ""
} |
q276354 | WebsocketHandler.__on_open | test | def __on_open(self, ws):
"""Called when the websocket is opened"""
logging.debug("ConnectorDB: Websocket opened")
# Connection success - decrease the wait time for next connection
self.reconnect_time /= self.reconnect_time_backoff_multiplier
self.status = "connected"
s... | python | {
"resource": ""
} |
q276355 | WebsocketHandler.__on_close | test | def __on_close(self, ws):
"""Called when the websocket is closed"""
if self.status == "disconnected":
return # This can be double-called on disconnect
logging.debug("ConnectorDB:WS: Websocket closed")
# Turn off the ping timer
if self.pingtimer is not None:
... | python | {
"resource": ""
} |
q276356 | WebsocketHandler.__on_error | test | def __on_error(self, ws, err):
"""Called when there is an error in the websocket"""
logging.debug("ConnectorDB:WS: Connection Error")
if self.status == "connecting":
self.status = "errored"
self.ws_openlock.release() | python | {
"resource": ""
} |
q276357 | WebsocketHandler.__on_message | test | def __on_message(self, ws, msg):
"""This function is called whenever there is a message received from the server"""
msg = json.loads(msg)
logging.debug("ConnectorDB:WS: Msg '%s'", msg["stream"])
# Build the subcription key
stream_key = msg["stream"] + ":"
if "transform" ... | python | {
"resource": ""
} |
q276358 | WebsocketHandler.__ensure_ping | test | def __ensure_ping(self):
"""Each time the server sends a ping message, we record the timestamp. If we haven't received a ping
within the given interval, then we assume that the connection was lost, close the websocket and
attempt to reconnect"""
logging.debug("ConnectorDB:WS: pingcheck"... | python | {
"resource": ""
} |
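`__ensure_ping` describes a watchdog: record the timestamp of each server ping, and if none arrives within the interval, assume the connection was lost, close the websocket, and reconnect. A minimal standalone sketch of that check (names mirror the snippet; the close/reconnect calls are stubs):

```python
import time

class PingWatchdog(object):
    """Sketch of the ping-timeout check described by __ensure_ping."""

    def __init__(self, ping_interval=60.0):
        self.ping_interval = ping_interval
        self.last_ping_time = time.time()

    def on_ping(self):
        # Called whenever the server sends a ping message.
        self.last_ping_time = time.time()

    def ensure_ping(self):
        # Called periodically; triggers a reconnect if pings have stopped.
        if time.time() - self.last_ping_time > self.ping_interval:
            self.close_websocket()
            self.reconnect()

    def close_websocket(self):
        print("closing stale websocket")   # stand-in

    def reconnect(self):
        print("attempting to reconnect")   # stand-in
```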
q276359 | gatk_select_variants | test | def gatk_select_variants(job, mode, vcf_id, ref_fasta, ref_fai, ref_dict):
"""
Isolates a particular variant type from a VCF file using GATK SelectVariants
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str mode: variant type (i.e. SNP or INDEL)
:param str vcf_id: FileStoreI... | python | {
"resource": ""
} |
q276360 | gatk_variant_filtration | test | def gatk_variant_filtration(job, vcf_id, filter_name, filter_expression, ref_fasta, ref_fai, ref_dict):
"""
Filters VCF file using GATK VariantFiltration. Fixes extra pair of quotation marks in VCF header that
may interfere with other VCF tools.
:param JobFunctionWrappingJob job: passed automatically b... | python | {
"resource": ""
} |
q276361 | gatk_variant_recalibrator | test | def gatk_variant_recalibrator(job,
mode,
vcf,
ref_fasta, ref_fai, ref_dict,
annotations,
hapmap=None, omni=None, phase=None, dbsnp=None, mills=None,
... | python | {
"resource": ""
} |
q276362 | gatk_apply_variant_recalibration | test | def gatk_apply_variant_recalibration(job,
mode,
vcf,
recal_table, tranches,
ref_fasta, ref_fai, ref_dict,
ts_filter_level=99.0,
... | python | {
"resource": ""
} |
q276363 | gatk_combine_variants | test | def gatk_combine_variants(job, vcfs, ref_fasta, ref_fai, ref_dict, merge_option='UNIQUIFY'):
"""
Merges VCF files using GATK CombineVariants
:param JobFunctionWrappingJob job: Toil Job instance
:param dict vcfs: Dictionary of VCF FileStoreIDs {sample identifier: FileStoreID}
:param str ref_fasta: F... | python | {
"resource": ""
} |
q276364 | bam_quickcheck | test | def bam_quickcheck(bam_path):
"""
Perform a quick check on a BAM via `samtools quickcheck`.
This will detect obvious BAM errors such as truncation.
:param str bam_path: path to the BAM file to be checked
:rtype: boolean
:return: True if the BAM is valid, False if the BAM is invalid or something related t... | python | {
"resource": ""
} |
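`samtools quickcheck` exits non-zero on a broken BAM (e.g. truncation), so the wrapper reduces to a return-code test. A sketch assuming a local `samtools` binary rather than the dockerized tool the pipeline likely uses:

```python
import subprocess

def bam_quickcheck(bam_path):
    """Return True if `samtools quickcheck` accepts the BAM, else False.

    Assumes `samtools` is on PATH; the original may run it via Docker.
    """
    returncode = subprocess.call(['samtools', 'quickcheck', bam_path])
    return returncode == 0
```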
q276365 | load_handlers | test | def load_handlers(handler_mapping):
"""
Given a dictionary mapping which looks like the following, import the
objects based on the dotted path and yield the packet type and handler as
pairs.
If the special string '*' is passed, don't process that, pass it on as it
is a wildcard.
If a non-... | python | {
"resource": ""
} |
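The docstring describes mapping packet types to dotted import paths, passing the `'*'` wildcard through untouched. A sketch of that loader built on `importlib` (handling of non-importable values is elided in the original, so it is omitted here too):

```python
import importlib

def load_handlers(handler_mapping):
    """Yield (packet_type, handler) pairs from {packet_type: 'pkg.mod.attr'}.

    The special string '*' is passed through unchanged (wildcard).
    """
    for packet_type, dotted_path in handler_mapping.items():
        if dotted_path == '*':
            yield packet_type, dotted_path
            continue
        module_path, _, attr = dotted_path.rpartition('.')
        module = importlib.import_module(module_path)
        yield packet_type, getattr(module, attr)

# Example: dict(load_handlers({'ping': 'handlers.ping.handle', 'raw': '*'}))
```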
q276366 | write_config | test | def write_config(configuration):
"""Helper to write the JSON configuration to a file"""
with open(CONFIG_PATH, 'w') as f:
json.dump(configuration, f, indent=2, sort_keys=True) | python | {
"resource": ""
} |
q276367 | get_config | test | def get_config():
"""Gets the configuration for this project from the default JSON file, or writes one if it doesn't exist
:rtype: dict
"""
if not os.path.exists(CONFIG_PATH):
write_config({})
with open(CONFIG_PATH) as f:
return json.load(f) | python | {
"resource": ""
} |
q276368 | OlsClient.get_term | test | def get_term(self, ontology, iri):
"""Gets the data for a given term
:param str ontology: The name of the ontology
:param str iri: The IRI of a term
:rtype: dict
"""
url = self.ontology_term_fmt.format(ontology, iri)
response = requests.get(url)
return r... | python | {
"resource": ""
} |
q276369 | OlsClient.search | test | def search(self, name, query_fields=None):
"""Searches the OLS with the given term
:param str name:
:param list[str] query_fields: Fields to query
:return: dict
"""
params = {'q': name}
if query_fields is not None:
params['queryFields'] = '{{{}}}'.for... | python | {
"resource": ""
} |
q276370 | OlsClient.suggest | test | def suggest(self, name, ontology=None):
"""Suggest terms from an optional list of ontologies
:param str name:
:param list[str] ontology:
:rtype: dict
.. seealso:: https://www.ebi.ac.uk/ols/docs/api#_suggest_term
"""
params = {'q': name}
if ontology:
... | python | {
"resource": ""
} |
q276371 | OlsClient.iter_descendants | test | def iter_descendants(self, ontology, iri, size=None, sleep=None):
"""Iterates over the descendants of a given term
:param str ontology: The name of the ontology
:param str iri: The IRI of a term
:param int size: The size of each page. Defaults to 500, which is the maximum allowed by the... | python | {
"resource": ""
} |
q276372 | OlsClient.iter_descendants_labels | test | def iter_descendants_labels(self, ontology, iri, size=None, sleep=None):
"""Iterates over the labels for the descendants of a given term
:param str ontology: The name of the ontology
:param str iri: The IRI of a term
:param int size: The size of each page. Defaults to 500, which is the ... | python | {
"resource": ""
} |
q276373 | OlsClient.iter_labels | test | def iter_labels(self, ontology, size=None, sleep=None):
"""Iterates over the labels of terms in the ontology. Automatically wraps the pager returned by the OLS.
:param str ontology: The name of the ontology
:param int size: The size of each page. Defaults to 500, which is the maximum allowed by... | python | {
"resource": ""
} |
q276374 | OlsClient.iter_hierarchy | test | def iter_hierarchy(self, ontology, size=None, sleep=None):
"""Iterates over parent-child relations
:param str ontology: The name of the ontology
:param int size: The size of each page. Defaults to 500, which is the maximum allowed by the EBI.
:param int sleep: The amount of time to slee... | python | {
"resource": ""
} |
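All of the `iter_*` methods wrap the OLS pager: request a page of `size` terms, yield its contents, follow the `next` link until exhausted, optionally sleeping between requests. A hedged sketch of that pager against the public OLS terms endpoint (the HAL-style field names are taken from the public OLS API, but treat them as assumptions here):

```python
import time
import requests

def iter_terms(ontology, size=500, sleep=None,
               ols_base='https://www.ebi.ac.uk/ols/api'):
    """Sketch of the OLS pager underlying the iter_* methods.

    Follows HAL-style `_links.next.href` pagination; field names are
    assumptions based on the public OLS API.
    """
    url = '{}/ontologies/{}/terms'.format(ols_base, ontology)
    params = {'size': size}
    while url:
        response = requests.get(url, params=params).json()
        for term in response.get('_embedded', {}).get('terms', []):
            yield term
        url = response.get('_links', {}).get('next', {}).get('href')
        params = None  # the next link already encodes the query
        if sleep:
            time.sleep(sleep)

# iter_labels would then reduce to (term['label'] for term in iter_terms('go'))
```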
q276375 | run_fastqc | test | def run_fastqc(job, r1_id, r2_id):
"""
Run Fastqc on the input reads
:param JobFunctionWrappingJob job: passed automatically by Toil
:param str r1_id: FileStoreID of fastq read 1
:param str r2_id: FileStoreID of fastq read 2
:return: FileStoreID of fastQC output (tarball)
:rtype: str
""... | python | {
"resource": ""
} |
q276376 | Merge.addStream | test | def addStream(self, stream, t1=None, t2=None, limit=None, i1=None, i2=None, transform=None):
"""Adds the given stream to the query construction. The function supports both stream
names and Stream objects."""
params = query_maker(t1, t2, limit, i1, i2, transform)
params["stream"] = ... | python | {
"resource": ""
} |
q276377 | create_app | test | def create_app(config=None):
""" This needs some tidying up. To avoid circular imports we import
everything here but it makes this method a bit more gross.
"""
# Initialise the app
from home.config import TEMPLATE_FOLDER, STATIC_FOLDER
app = Flask(__name__, static_folder=STATIC_FOLDER,
... | python | {
"resource": ""
} |
q276378 | SparkService.start | test | def start(self, job):
"""
Start spark and hdfs master containers
:param job: The underlying job.
"""
if self.hostname is None:
self.hostname = subprocess.check_output(["hostname", "-f",])[:-1]
_log.info("Started Spark master container.")
self.sparkC... | python | {
"resource": ""
} |
q276379 | WorkerService.start | test | def start(self, job):
"""
Start spark and hdfs worker containers
:param job: The underlying job.
"""
# start spark and our datanode
self.sparkContainerID = dockerCheckOutput(job=job,
defer=STOP,
... | python | {
"resource": ""
} |
q276380 | WorkerService.__start_datanode | test | def __start_datanode(self, job):
"""
Launches the Hadoop datanode.
:param job: The underlying job.
"""
self.hdfsContainerID = dockerCheckOutput(job=job,
defer=STOP,
workDir=os.getcw... | python | {
"resource": ""
} |
q276381 | WorkerService.stop | test | def stop(self, fileStore):
"""
Stop spark and hdfs worker containers
:param fileStore: The file store of the underlying job.
"""
subprocess.call(["docker", "exec", self.sparkContainerID, "rm", "-r", "/ephemeral/spark"])
subprocess.call(["docker", "stop", self.sparkContainerID])
subproc... | python | {
"resource": ""
} |
q276382 | WorkerService.check | test | def check(self):
"""
Checks to see if Spark worker and HDFS datanode are still running.
"""
status = _checkContainerStatus(self.sparkContainerID,
self.hdfsContainerID,
sparkNoun='worker',
... | python | {
"resource": ""
} |
q276383 | base_tokenizer | test | def base_tokenizer(fp):
'Tokenizer. Generates a token stream from the text'
if isinstance(fp, StringIO):
template_file = fp
size = template_file.len
else:
#empty file check
if os.fstat(fp.fileno()).st_size == 0:
yield TOKEN_EOF, 'EOF', 0, 0
return
t... | python | {
"resource": ""
} |
q276384 | lookup_zone | test | def lookup_zone(conn, zone):
"""Look up a zone ID for a zone string.
Args: conn: boto.route53.Route53Connection
zone: string eg. foursquare.com
Returns: zone ID eg. ZE2DYFZDWGSL4.
Raises: ZoneNotFoundError if zone not found."""
all_zones = conn.get_all_hosted_zones()
for resp in all_zones['ListHost... | python | {
"resource": ""
} |
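`lookup_zone` scans `conn.get_all_hosted_zones()` for a zone whose name matches and returns the bare ID (Route 53 returns IDs as `/hostedzone/ZE2DYFZDWGSL4`). A sketch of the elided matching loop, based on the boto2 response layout visible in the snippet; treat the exact dict keys as assumptions:

```python
class ZoneNotFoundError(Exception):
    pass

def lookup_zone(conn, zone):
    """Sketch of the zone-name -> zone-ID lookup described in the docstring.

    Assumes the boto2 Route53 response layout:
    ListHostedZonesResponse -> HostedZones -> [{'Name': ..., 'Id': ...}].
    """
    all_zones = conn.get_all_hosted_zones()
    for resp in all_zones['ListHostedZonesResponse']['HostedZones']:
        if resp['Name'].rstrip('.') == zone.rstrip('.'):
            # IDs come back as '/hostedzone/ZE2DYFZDWGSL4'; strip the prefix.
            return resp['Id'].replace('/hostedzone/', '')
    raise ZoneNotFoundError('zone %s not found' % zone)
```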
q276385 | fetch_config | test | def fetch_config(zone, conn):
"""Fetch all pieces of a Route 53 config from Amazon.
Args: zone: string, hosted zone id.
conn: boto.route53.Route53Connection
Returns: list of ElementTrees, one for each piece of config."""
more_to_fetch = True
cfg_chunks = []
next_name = None
next_type = None
nex... | python | {
"resource": ""
} |
q276386 | merge_config | test | def merge_config(cfg_chunks):
"""Merge a set of fetched Route 53 config Etrees into a canonical form.
Args: cfg_chunks: [ lxml.etree.ETree ]
Returns: lxml.etree.Element"""
root = lxml.etree.XML('<ResourceRecordSets xmlns="%s"></ResourceRecordSets>' % R53_XMLNS, parser=XML_PARSER)
for chunk in cfg_chunks:
... | python | {
"resource": ""
} |
q276387 | validate_changeset | test | def validate_changeset(changeset):
"""Validate a changeset is compatible with Amazon's API spec.
Args: changeset: lxml.etree.Element (<ChangeResourceRecordSetsRequest>)
Returns: [ errors ] list of error strings or []."""
errors = []
changes = changeset.findall('.//{%s}Change' % R53_XMLNS)
num_changes = len... | python | {
"resource": ""
} |
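`validate_changeset` counts `<Change>` elements and returns a list of error strings rather than raising. A sketch of the count check implied by the visible lines, where the per-request cap and the XML namespace version are assumptions:

```python
R53_XMLNS = 'https://route53.amazonaws.com/doc/2012-02-29/'  # version assumed
MAX_CHANGES = 1000  # assumed per-request cap on changes

def validate_changeset(changeset):
    """Return a list of error strings for an lxml <ChangeResourceRecordSetsRequest>."""
    errors = []
    changes = changeset.findall('.//{%s}Change' % R53_XMLNS)
    num_changes = len(changes)
    if num_changes == 0:
        errors.append('changeset must contain at least one change')
    if num_changes > MAX_CHANGES:
        errors.append('changeset has %d changes; max is %d'
                      % (num_changes, MAX_CHANGES))
    return errors
```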
q276388 | minimize_best_n | test | def minimize_best_n(Members):
'''
Orders population members from lowest fitness to highest fitness
Args:
Members (list): list of PyGenetics Member objects
Returns:
list: ordered list of Members, from highest fitness to lowest fitness
'''
return(list(reversed(sorted(
Me... | python | {
"resource": ""
} |
q276389 | Population.fitness | test | def fitness(self):
'''Population fitness == average member fitness score'''
if len(self.__members) != 0:
if self.__num_processes > 1:
members = [m.get() for m in self.__members]
else:
members = self.__members
return sum(m.fitness_score... | python | {
"resource": ""
} |
q276390 | Population.ave_cost_fn_val | test | def ave_cost_fn_val(self):
'''Returns average cost function return value for all members'''
if len(self.__members) != 0:
if self.__num_processes > 1:
members = [m.get() for m in self.__members]
else:
members = self.__members
return sum... | python | {
"resource": ""
} |
q276391 | Population.med_cost_fn_val | test | def med_cost_fn_val(self):
'''Returns median cost function return value for all members'''
if len(self.__members) != 0:
if self.__num_processes > 1:
members = [m.get() for m in self.__members]
else:
members = self.__members
return medi... | python | {
"resource": ""
} |
q276392 | Population.parameters | test | def parameters(self):
'''Population parameter vals == average member parameter vals'''
if len(self.__members) != 0:
if self.__num_processes > 1:
members = [m.get() for m in self.__members]
else:
members = self.__members
params = {}
... | python | {
"resource": ""
} |
q276393 | Population.members | test | def members(self):
'''Returns Member objects of population'''
if self.__num_processes > 1:
return [m.get() for m in self.__members]
else:
return self.__members | python | {
"resource": ""
} |
q276394 | Population.add_parameter | test | def add_parameter(self, name, min_val, max_val):
'''Adds a parameter to the Population
Args:
name (str): name of the parameter
min_val (int or float): minimum value for the parameter
max_val (int or float): maximum value for the parameter
'''
self.__p... | python | {
"resource": ""
} |
q276395 | Population.next_generation | test | def next_generation(self, mut_rate=0, max_mut_amt=0, log_base=10):
'''Generates the next population from a previously evaluated generation
Args:
mut_rate (float): mutation rate for new members (0.0 - 1.0)
max_mut_amt (float): how much the member is allowed to mutate
... | python | {
"resource": ""
} |
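`next_generation` takes a mutation rate and a maximum mutation amount; a standard implementation perturbs each selected parameter by a random fraction of its range, clamped to the parameter bounds. A sketch of that mutation step under those assumptions (the PyGenetics internals are elided):

```python
import random

def mutate_parameter(value, min_val, max_val, mut_rate=0.1, max_mut_amt=0.1):
    """Sketch: with probability mut_rate, shift `value` by up to
    max_mut_amt * (max_val - min_val), clamped to [min_val, max_val].

    This is a generic GA mutation; the library's exact scheme is elided.
    """
    if random.random() >= mut_rate:
        return value
    span = (max_val - min_val) * max_mut_amt
    mutated = value + random.uniform(-span, span)
    return max(min_val, min(max_val, mutated))

# e.g. mutate_parameter(5.0, 0.0, 10.0, mut_rate=0.5, max_mut_amt=0.2)
```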
q276396 | ConfigOptionParser.normalize_keys | test | def normalize_keys(self, items):
"""Return a config dictionary with normalized keys regardless of
whether the keys were specified in environment variables or in config
files"""
normalized = {}
for key, val in items:
key = key.replace('_', '-')
if not key.s... | python | {
"resource": ""
} |
q276397 | ConfigOptionParser.get_environ_vars | test | def get_environ_vars(self):
"""Returns a generator with all environmental vars with prefix PIP_"""
for key, val in os.environ.items():
if _environ_prefix_re.search(key):
yield (_environ_prefix_re.sub("", key).lower(), val) | python | {
"resource": ""
} |
q276398 | throws_exception | test | def throws_exception(callable, *exceptions):
"""
Return True if the callable throws the specified exception
>>> throws_exception(lambda: int('3'))
False
>>> throws_exception(lambda: int('a'))
True
>>> throws_exception(lambda: int('a'), KeyError)
False
"""
with context.ExceptionTrap():
with context.Exceptio... | python | {
"resource": ""
} |
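The visible body of `throws_exception` leans on a `context.ExceptionTrap` helper; a dependency-free version that satisfies the same doctests is a plain try/except with a default exception tuple:

```python
def throws_exception(callable, *exceptions):
    """
    Return True if the callable throws one of the specified exceptions.

    Dependency-free sketch equivalent to the ExceptionTrap version:

    >>> throws_exception(lambda: int('3'))
    False
    >>> throws_exception(lambda: int('a'))
    True
    >>> throws_exception(lambda: int('a'), KeyError)
    False
    """
    if not exceptions:
        exceptions = (Exception,)
    try:
        callable()
    except exceptions:
        return True
    except Exception:
        return False  # a different exception than the one(s) asked about
    return False
```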
q276399 | transform_hits | test | def transform_hits(hits):
"""
The list from pypi is really a list of versions. We want a list of
packages with the list of versions stored inline. This converts the
list from pypi into one we can use.
"""
packages = {}
for hit in hits:
name = hit['name']
summary = hit['summar... | python | {
"resource": ""
} |
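PyPI search hits arrive one per version; `transform_hits` folds them into one record per package with the versions inlined. A sketch of that fold (the field names `name`/`summary`/`version` appear in the visible lines; keying and result ordering are assumptions):

```python
def transform_hits(hits):
    """Sketch: fold per-version PyPI hits into per-package records."""
    packages = {}
    for hit in hits:
        name, version = hit['name'], hit['version']
        if name not in packages:
            packages[name] = {'name': name,
                              'summary': hit['summary'],
                              'versions': []}
        packages[name]['versions'].append(version)
    return list(packages.values())
```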