text stringlengths 81 112k |
|---|
Call this method before importing a ga4gh module in the scripts dir.
Otherwise, you will be using the installed package instead of
the development package.
Assumes a certain directory structure.
def ga4ghImportGlue():
    """
    Call this before importing a ga4gh module from the scripts dir so
    that the development package, not the installed package, is used.
    Assumes the standard repository directory layout (scripts dir one
    level below the repository root).
    """
    scriptsDir = os.path.dirname(os.path.abspath(__file__))
    repoRoot = os.path.dirname(scriptsDir)
    sys.path.append(repoRoot)
Update the priority of the file handle. The element is first
removed and then added to the left of the deque.
def _update(self, dataFile, handle):
    """
    Promote the given file handle to most-recently-used: the
    (dataFile, handle) pair is dropped from the deque and re-inserted
    at the left end.
    """
    entry = (dataFile, handle)
    self._cache.remove(entry)
    self._add(dataFile, handle)
Remove the least recently used file handle from the cache.
The pop method removes an element from the right of the deque.
Returns the name of the file that has been removed.
def _removeLru(self):
    """
    Evict the least recently used file handle (popped from the right
    end of the deque), close it, and return the name of the evicted
    file.
    """
    evictedFile, evictedHandle = self._cache.pop()
    evictedHandle.close()
    return evictedFile
Returns handle associated to the filename. If the file is
already opened, update its priority in the cache and return
its handle. Otherwise, open the file using openMethod, store
it in the cache and return the corresponding handle.
def getFileHandle(self, dataFile, openMethod):
    """
    Return the handle associated with dataFile. A cache hit refreshes
    the entry's priority and returns the cached handle; a miss opens
    the file via openMethod, caches the handle, and evicts the least
    recently used entry when the cache grows past its maximum size.

    :raises exceptions.FileOpenFailedException: if openMethod raises
        ValueError for the given file.
    """
    if dataFile in self._memoTable:
        cached = self._memoTable[dataFile]
        self._update(dataFile, cached)
        return cached
    try:
        handle = openMethod(dataFile)
    except ValueError:
        raise exceptions.FileOpenFailedException(dataFile)
    self._memoTable[dataFile] = handle
    self._add(dataFile, handle)
    if len(self._memoTable) > self._maxCacheSize:
        # _removeLru closes the evicted handle and reports its name.
        evicted = self._removeLru()
        del self._memoTable[evicted]
    return handle
Join an array of ids into a compound id string
def join(cls, splits):
    """
    Join an array of ids into a compound id string: a JSON-style
    list of double-quoted segments with no whitespace.
    """
    quoted = ','.join('"{}"'.format(split) for split in splits)
    return '[{}]'.format(quoted)
Parses the specified compoundId string and returns an instance
of this CompoundId class.
:raises: An ObjectWithIdNotFoundException if parsing fails. This is
because this method is a client-facing method, and if a malformed
identifier (under our internal rules) is provided, the response should
be that the identifier does not exist.
def parse(cls, compoundIdStr):
    """
    Parses the specified compoundId string and returns an instance
    of this CompoundId class.

    :param compoundIdStr: the obfuscated compound id string to parse.
    :return: an instance of cls populated from the decoded fields.
    :raises: An ObjectWithIdNotFoundException if parsing fails. This is
    because this method is a client-facing method, and if a malformed
    identifier (under our internal rules) is provided, the response should
    be that the identifier does not exist.
    """
    # NOTE: basestring is Python 2 only; this module targets Python 2.
    if not isinstance(compoundIdStr, basestring):
        raise exceptions.BadIdentifierException(compoundIdStr)
    try:
        deobfuscated = cls.deobfuscate(compoundIdStr)
    except TypeError:
        # When a string that cannot be converted to base64 is passed
        # as an argument, b64decode raises a TypeError. We must treat
        # this as an ID not found error.
        raise exceptions.ObjectWithIdNotFoundException(compoundIdStr)
    try:
        encodedSplits = cls.split(deobfuscated)
        splits = [cls.decode(split) for split in encodedSplits]
    except (UnicodeDecodeError, ValueError):
        # Sometimes base64 decoding succeeds but we're left with
        # unicode gibberish. This is also an IdNotFound.
        raise exceptions.ObjectWithIdNotFoundException(compoundIdStr)
    # pull the differentiator out of the splits before instantiating
    # the class, if the differentiator exists
    fieldsLength = len(cls.fields)
    if cls.differentiator is not None:
        differentiatorIndex = cls.fields.index(
            cls.differentiatorFieldName)
        if differentiatorIndex < len(splits):
            del splits[differentiatorIndex]
        else:
            # Too few splits to even contain the differentiator.
            raise exceptions.ObjectWithIdNotFoundException(
                compoundIdStr)
        # The differentiator field no longer has a matching split.
        fieldsLength -= 1
    if len(splits) != fieldsLength:
        raise exceptions.ObjectWithIdNotFoundException(compoundIdStr)
    return cls(None, *splits)
Mildly obfuscates the specified ID string in an easily reversible
fashion. This is not intended for security purposes, but rather to
dissuade users from depending on our internal ID structures.
def obfuscate(cls, idStr):
    """
    Mildly obfuscates the specified ID string in an easily reversible
    fashion. This is not intended for security purposes, but rather to
    dissuade users from depending on our internal ID structures.
    """
    # URL-safe base64 with the '=' padding stripped; deobfuscate()
    # restores the padding before decoding.
    encoded = base64.urlsafe_b64encode(idStr.encode('utf-8'))
    return unicode(encoded.replace(b'=', b''))
Reverses the obfuscation done by the :meth:`obfuscate` method.
If an identifier arrives without correct base64 padding this
function will append it to the end.
def deobfuscate(cls, data):
    """
    Reverses the obfuscation done by the :meth:`obfuscate` method.
    If an identifier arrives without correct base64 padding this
    function will append it to the end.
    """
    # b64decode needs the input length to be a multiple of four;
    # restore the '=' padding that obfuscate() stripped. The str()
    # call is necessary to convert the unicode string to an ascii
    # string since the urlsafe_b64decode method sometimes chokes on
    # unicode strings.
    padding = b'A=='[(len(data) - 1) % 4:]
    return base64.urlsafe_b64decode(str(data + padding))
Sets the attrbutes of a message during serialization.
def serializeAttributes(self, msg):
    """
    Sets the attributes of a message during serialization and returns
    the message.
    """
    for key, value in self.getAttributes().items():
        protocol.setAttribute(msg.attributes.attr[key].values, value)
    return msg
Scans the specified directory for files with the specified globbing
pattern and calls self._addDataFile for each. Raises an
EmptyDirException if no data files are found.
def _scanDataFiles(self, dataDir, patterns):
    """
    Scans dataDir for files matching each globbing pattern in
    patterns, calling self._addDataFile for every match.

    :raises exceptions.EmptyDirException: if no data files are found.
    """
    matched = 0
    for pattern in patterns:
        for filename in glob.glob(os.path.join(dataDir, pattern)):
            self._addDataFile(filename)
            matched += 1
    if matched == 0:
        raise exceptions.EmptyDirException(dataDir, patterns)
Attempts to get a list of peers from a file specified in configuration.
This file has one URL per line and can contain newlines and comments.
# Main ga4gh node
http://1kgenomes.ga4gh.org
# Local intranet peer
https://192.168.1.1
The server will attempt to add URLs in this file to its registry at
startup and will log a warning if the file isn't found.
def getInitialPeerList(filePath, logger=None):
    """
    Attempts to get a list of peers from a file specified in configuration.
    This file has one URL per line and can contain newlines and comments.

        # Main ga4gh node
        http://1kgenomes.ga4gh.org
        # Local intranet peer
        https://192.168.1.1

    The server will attempt to add URLs in this file to its registry at
    startup and will log a warning if the file isn't found.

    :param filePath: path of the peer-list file to read.
    :param logger: optional logger used to warn when the file is empty.
    :return: list of URL strings, stripped of surrounding whitespace,
        with blank lines and comment lines removed.
    """
    with open(filePath) as textFile:
        lines = textFile.readlines()
    if len(lines) == 0:
        if logger:
            logger.warn("Couldn't load the initial "
                        "peer list. Try adding a "
                        "file named 'initial_peers.txt' "
                        "to {}".format(os.getcwd()))
    # Remove lines that start with a hash or are empty. The previous
    # filter compared unstripped lines against "" (never true, since
    # readlines keeps the trailing newline) and rejected any line that
    # merely *contained* '#'; it also returned URLs with their
    # newlines still attached.
    ret = []
    for line in lines:
        stripped = line.strip()
        if stripped and not stripped.startswith("#"):
            ret.append(stripped)
    return ret
Takes the datarepository, a url, and an optional logger and attempts
to add the peer into the repository.
def insertInitialPeer(dataRepository, url, logger=None):
    """
    Takes the datarepository, a url, and an optional logger and attempts
    to add the peer into the repository. Failures (already registered,
    malformed URL) are logged at debug level and otherwise ignored.
    """
    try:
        peer = datamodel.peers.Peer(url)
        dataRepository.insertPeer(peer)
    except exceptions.RepoManagerException as exc:
        if logger:
            logger.debug(
                "Peer already in registry {} {}".format(peer.getUrl(), exc))
    except exceptions.BadUrlException as exc:
        if logger:
            # Bug fix: `.format(url)` was previously called with two
            # placeholders but only one argument (exc was passed as a
            # stray positional to debug()), raising IndexError instead
            # of logging the message.
            logger.debug("A URL in the initial "
                         "peer list {} was malformed. {}".format(url, exc))
Attempts to return whether a given URL string is valid by checking
for the presence of the URL scheme and netloc using the urlparse
module, and then using a regex.
From http://stackoverflow.com/questions/7160737/
def isUrl(urlString):
    """
    Attempts to return whether a given URL string is valid by checking
    for the presence of the URL scheme and netloc using the urlparse
    module, and then using a regex.
    From http://stackoverflow.com/questions/7160737/
    """
    pattern = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)'
        r'+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    parts = urlparse.urlparse(urlString)
    hasSchemeAndNetloc = parts.netloc != '' and parts.scheme != ''
    return pattern.match(urlString) and hasSchemeAndNetloc
Attempt to safely set the URL by string.
def setUrl(self, url):
    """
    Attempt to safely set the URL by string.

    :raises exceptions.BadUrlException: if the string is not a valid
        URL according to isUrl.
    """
    if not isUrl(url):
        raise exceptions.BadUrlException(url)
    self._url = url
    return self
Sets the attributes dictionary from a JSON string.
def setAttributesJson(self, attributesJson):
    """
    Sets the attributes dictionary from a JSON string.

    :raises exceptions.InvalidJsonException: if the string cannot be
        parsed as JSON.
    """
    try:
        self._attributes = json.loads(attributesJson)
    except (ValueError, TypeError):
        # json.loads signals malformed input with ValueError (or its
        # JSONDecodeError subclass) and non-string input with
        # TypeError. The previous bare `except:` also swallowed
        # KeyboardInterrupt and SystemExit.
        raise exceptions.InvalidJsonException(attributesJson)
    return self
This method accepts a model record and sets class variables.
def populateFromRow(self, peerRecord):
    """
    This method accepts a model record and sets class variables
    (url and attributes) from it, returning self.
    """
    self.setUrl(peerRecord.url).setAttributesJson(peerRecord.attributes)
    return self
Returns a generator over the results for the specified request, which
is over a set of objects of the specified size. The objects are
returned by call to the specified method, which must take a single
integer as an argument. The returned generator yields a sequence of
(object, nextPageToken) pairs, which allows this iteration to be picked
up at any point.
def _topLevelObjectGenerator(self, request, numObjects, getByIndexMethod):
    """
    Returns a generator over the results for the specified request, which
    is over a set of objects of the specified size. The objects are
    returned by call to the specified method, which must take a single
    integer as an argument. The returned generator yields a sequence of
    (object, nextPageToken) pairs, which allows this iteration to be
    picked up at any point.
    """
    index = 0
    if request.page_token:
        # Resume iteration from the index encoded in the page token.
        index, = paging._parsePageToken(request.page_token, 1)
    while index < numObjects:
        datamodelObject = getByIndexMethod(index)
        index += 1
        token = str(index) if index < numObjects else None
        yield datamodelObject.toProtocolElement(), token
Returns a generator over the results for the specified request, from
a set of protocol objects of the specified size. The objects are
returned by call to the specified method, which must take a single
integer as an argument. The returned generator yields a sequence of
(object, nextPageToken) pairs, which allows this iteration to be picked
up at any point.
def _protocolObjectGenerator(self, request, numObjects, getByIndexMethod):
    """
    Returns a generator over the results for the specified request, from
    a set of protocol objects of the specified size. The objects are
    returned by call to the specified method, which must take a single
    integer as an argument. The returned generator yields a sequence of
    (object, nextPageToken) pairs, which allows this iteration to be
    picked up at any point.
    """
    index = 0
    if request.page_token:
        # Resume iteration from the index encoded in the page token.
        index, = paging._parsePageToken(request.page_token, 1)
    while index < numObjects:
        protocolObject = getByIndexMethod(index)
        index += 1
        token = str(index) if index < numObjects else None
        yield protocolObject, token
Returns a generator over the objects in the specified list using
_protocolObjectGenerator to generate page tokens.
def _protocolListGenerator(self, request, objectList):
    """
    Returns a generator over the objects in the specified list using
    _protocolObjectGenerator to generate page tokens.
    """
    fetch = objectList.__getitem__
    return self._protocolObjectGenerator(request, len(objectList), fetch)
Returns a generator over the objects in the specified list using
_topLevelObjectGenerator to generate page tokens.
def _objectListGenerator(self, request, objectList):
    """
    Returns a generator over the objects in the specified list using
    _topLevelObjectGenerator to generate page tokens.
    """
    fetch = objectList.__getitem__
    return self._topLevelObjectGenerator(request, len(objectList), fetch)
Returns a generator over the (dataset, nextPageToken) pairs
defined by the specified request
def datasetsGenerator(self, request):
    """
    Returns a generator over the (dataset, nextPageToken) pairs
    defined by the specified request
    """
    repo = self.getDataRepository()
    return self._topLevelObjectGenerator(
        request, repo.getNumDatasets(), repo.getDatasetByIndex)
Returns a generator over the (phenotypeAssociationSet, nextPageToken)
pairs defined by the specified request
def phenotypeAssociationSetsGenerator(self, request):
    """
    Returns a generator over the (phenotypeAssociationSet, nextPageToken)
    pairs defined by the specified request
    """
    repo = self.getDataRepository()
    dataset = repo.getDataset(request.dataset_id)
    return self._topLevelObjectGenerator(
        request,
        dataset.getNumPhenotypeAssociationSets(),
        dataset.getPhenotypeAssociationSetByIndex)
Returns a generator over the (readGroupSet, nextPageToken) pairs
defined by the specified request.
def readGroupSetsGenerator(self, request):
    """
    Returns a generator over the (readGroupSet, nextPageToken) pairs
    defined by the specified request.
    """
    repo = self.getDataRepository()
    dataset = repo.getDataset(request.dataset_id)
    return self._readGroupSetsGenerator(
        request,
        dataset.getNumReadGroupSets(),
        dataset.getReadGroupSetByIndex)
Returns a generator over the results for the specified request, which
is over a set of objects of the specified size. The objects are
returned by call to the specified method, which must take a single
integer as an argument. The returned generator yields a sequence of
(object, nextPageToken) pairs, which allows this iteration to be picked
up at any point.
def _readGroupSetsGenerator(self, request, numObjects, getByIndexMethod):
    """
    Returns a generator over the results for the specified request, which
    is over a set of objects of the specified size. The objects are
    returned by call to the specified method, which must take a single
    integer as an argument. The returned generator yields a sequence of
    (object, nextPageToken) pairs, which allows this iteration to be picked
    up at any point. Results are additionally filtered by the request's
    name and biosample_id fields.
    """
    currentIndex = 0
    if request.page_token:
        # Resume iteration from the index stored in the page token.
        currentIndex, = paging._parsePageToken(
            request.page_token, 1)
    while currentIndex < numObjects:
        obj = getByIndexMethod(currentIndex)
        include = True
        rgsp = obj.toProtocolElement()
        if request.name and request.name != obj.getLocalId():
            include = False
        if request.biosample_id and include:
            # Rebuild read_groups so it contains only the read groups
            # matching the requested biosample.
            rgsp.ClearField("read_groups")
            for readGroup in obj.getReadGroups():
                if request.biosample_id == readGroup.getBiosampleId():
                    rgsp.read_groups.extend(
                        [readGroup.toProtocolElement()])
            # If none of the biosamples match and the readgroupset
            # contains readgroups, don't include in the response
            if len(rgsp.read_groups) == 0 and \
                    len(obj.getReadGroups()) != 0:
                include = False
        currentIndex += 1
        nextPageToken = None
        if currentIndex < numObjects:
            nextPageToken = str(currentIndex)
        if include:
            yield rgsp, nextPageToken
Returns a generator over the (referenceSet, nextPageToken) pairs
defined by the specified request.
def referenceSetsGenerator(self, request):
    """
    Returns a generator over the (referenceSet, nextPageToken) pairs
    defined by the specified request. Reference sets are filtered by
    the request's md5checksum, accession and assembly_id fields when
    those are set.
    """
    matches = []
    for referenceSet in self.getDataRepository().getReferenceSets():
        if request.md5checksum and \
                request.md5checksum != referenceSet.getMd5Checksum():
            continue
        if request.accession and \
                request.accession not in referenceSet.getSourceAccessions():
            continue
        if request.assembly_id and \
                request.assembly_id != referenceSet.getAssemblyId():
            continue
        matches.append(referenceSet)
    return self._objectListGenerator(request, matches)
Returns a generator over the (reference, nextPageToken) pairs
defined by the specified request.
def referencesGenerator(self, request):
    """
    Returns a generator over the (reference, nextPageToken) pairs
    defined by the specified request. References are filtered by the
    request's md5checksum and accession fields when those are set.
    """
    referenceSet = self.getDataRepository().getReferenceSet(
        request.reference_set_id)
    matches = []
    for reference in referenceSet.getReferences():
        if request.md5checksum and \
                request.md5checksum != reference.getMd5Checksum():
            continue
        if request.accession and \
                request.accession not in reference.getSourceAccessions():
            continue
        matches.append(reference)
    return self._objectListGenerator(request, matches)
Returns a generator over the (variantSet, nextPageToken) pairs defined
by the specified request.
def variantSetsGenerator(self, request):
    """
    Returns a generator over the (variantSet, nextPageToken) pairs defined
    by the specified request.
    """
    repo = self.getDataRepository()
    dataset = repo.getDataset(request.dataset_id)
    return self._topLevelObjectGenerator(
        request,
        dataset.getNumVariantSets(),
        dataset.getVariantSetByIndex)
Returns a generator over the (variantAnnotationSet, nextPageToken)
pairs defined by the specified request.
def variantAnnotationSetsGenerator(self, request):
    """
    Returns a generator over the (variantAnnotationSet, nextPageToken)
    pairs defined by the specified request.
    """
    compoundId = datamodel.VariantSetCompoundId.parse(
        request.variant_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(request.variant_set_id)
    return self._topLevelObjectGenerator(
        request,
        variantSet.getNumVariantAnnotationSets(),
        variantSet.getVariantAnnotationSetByIndex)
Returns a generator over the (read, nextPageToken) pairs defined
by the specified request
def readsGenerator(self, request):
    """
    Returns a generator over the (read, nextPageToken) pairs defined
    by the specified request

    :raises exceptions.UnmappedReadsNotSupported: if no reference id
        is specified.
    :raises exceptions.BadRequestException: if no read group ids are
        specified.
    """
    if not request.reference_id:
        raise exceptions.UnmappedReadsNotSupported()
    numReadGroups = len(request.read_group_ids)
    if numReadGroups < 1:
        raise exceptions.BadRequestException(
            "At least one readGroupId must be specified")
    if numReadGroups == 1:
        return self._readsGeneratorSingle(request)
    return self._readsGeneratorMultiple(request)
Returns a generator over the (variant, nextPageToken) pairs defined
by the specified request.
def variantsGenerator(self, request):
    """
    Returns a generator over the (variant, nextPageToken) pairs defined
    by the specified request.
    """
    compoundId = datamodel.VariantSetCompoundId.parse(
        request.variant_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(compoundId.variant_set_id)
    return paging.VariantsIntervalIterator(request, variantSet)
Returns a generator over the (variantAnnotaitons, nextPageToken) pairs
defined by the specified request.
def variantAnnotationsGenerator(self, request):
    """
    Returns a generator over the (variantAnnotations, nextPageToken)
    pairs defined by the specified request.
    """
    compoundId = datamodel.VariantAnnotationSetCompoundId.parse(
        request.variant_annotation_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(compoundId.variant_set_id)
    annotationSet = variantSet.getVariantAnnotationSet(
        request.variant_annotation_set_id)
    return paging.VariantAnnotationsIntervalIterator(
        request, annotationSet)
Returns a generator over the (features, nextPageToken) pairs
defined by the (JSON string) request.
def featuresGenerator(self, request):
    """
    Returns a generator over the (features, nextPageToken) pairs
    defined by the (JSON string) request.

    :raises exceptions.ParentIncompatibleWithFeatureSet: if both a
        feature set id and a parent id are given but they name
        different datasets or feature sets.
    :raises exceptions.FeatureSetNotSpecifiedException: if neither a
        feature set id nor a parent id is supplied.
    """
    compoundId = None
    parentId = None
    if request.feature_set_id != "":
        compoundId = datamodel.FeatureSetCompoundId.parse(
            request.feature_set_id)
    if request.parent_id != "":
        compoundParentId = datamodel.FeatureCompoundId.parse(
            request.parent_id)
        parentId = compoundParentId.featureId
        if compoundId is None:
            # A client can optionally specify JUST the (compound)
            # parentID; derive the dataset & featureSet from it.
            compoundId = compoundParentId
        elif (compoundParentId.dataset_id != compoundId.dataset_id or
                compoundParentId.feature_set_id !=
                compoundId.feature_set_id):
            # Both ids were supplied: the dataset and featureSet of
            # the parent compound ID must match the featureSetId's.
            raise exceptions.ParentIncompatibleWithFeatureSet()
    if compoundId is None:
        raise exceptions.FeatureSetNotSpecifiedException()
    dataset = self.getDataRepository().getDataset(
        compoundId.dataset_id)
    featureSet = dataset.getFeatureSet(compoundId.feature_set_id)
    return paging.FeaturesIterator(request, featureSet, parentId)
Returns a generator over the (continuous, nextPageToken) pairs
defined by the (JSON string) request.
def continuousGenerator(self, request):
    """
    Returns a generator over the (continuous, nextPageToken) pairs
    defined by the (JSON string) request.

    :raises exceptions.ContinuousSetNotSpecifiedException: if the
        request does not name a continuous set.
    """
    if request.continuous_set_id == "":
        raise exceptions.ContinuousSetNotSpecifiedException()
    compoundId = datamodel.ContinuousSetCompoundId.parse(
        request.continuous_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    continuousSet = dataset.getContinuousSet(request.continuous_set_id)
    return paging.ContinuousIterator(request, continuousSet)
Returns a generator over the (phenotypes, nextPageToken) pairs
defined by the (JSON string) request
def phenotypesGenerator(self, request):
    """
    Returns a generator over the (phenotypes, nextPageToken) pairs
    defined by the (JSON string) request
    """
    # TODO make paging work using SPARQL?
    compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse(
        request.phenotype_association_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    associationSet = dataset.getPhenotypeAssociationSet(
        compoundId.phenotypeAssociationSetId)
    phenotypes = [
        association.phenotype
        for association in associationSet.getAssociations(request)]
    return self._protocolListGenerator(request, phenotypes)
Returns a generator over the (phenotypes, nextPageToken) pairs
defined by the (JSON string) request
def genotypesPhenotypesGenerator(self, request):
    """
    Returns a generator over the (phenotypes, nextPageToken) pairs
    defined by the (JSON string) request
    """
    # TODO make paging work using SPARQL?
    compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse(
        request.phenotype_association_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    associationSet = dataset.getPhenotypeAssociationSet(
        compoundId.phenotypeAssociationSetId)
    annotations = associationSet.getAssociations(
        request, dataset.getFeatureSets())
    return self._protocolListGenerator(request, annotations)
Returns a generator over the (callSet, nextPageToken) pairs defined
by the specified request.
def callSetsGenerator(self, request):
    """
    Returns a generator over the (callSet, nextPageToken) pairs defined
    by the specified request. Call sets are filtered by the request's
    name and biosample_id fields when those are set.
    """
    compoundId = datamodel.VariantSetCompoundId.parse(
        request.variant_set_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(compoundId.variant_set_id)
    matches = []
    for callSet in variantSet.getCallSets():
        nameOk = not request.name or request.name == callSet.getLocalId()
        biosampleOk = (not request.biosample_id or
                       request.biosample_id == callSet.getBiosampleId())
        if nameOk and biosampleOk:
            matches.append(callSet)
    return self._objectListGenerator(request, matches)
Returns a generator over the (featureSet, nextPageToken) pairs
defined by the specified request.
def featureSetsGenerator(self, request):
    """
    Returns a generator over the (featureSet, nextPageToken) pairs
    defined by the specified request.
    """
    repo = self.getDataRepository()
    dataset = repo.getDataset(request.dataset_id)
    return self._topLevelObjectGenerator(
        request,
        dataset.getNumFeatureSets(),
        dataset.getFeatureSetByIndex)
Returns a generator over the (continuousSet, nextPageToken) pairs
defined by the specified request.
def continuousSetsGenerator(self, request):
    """
    Returns a generator over the (continuousSet, nextPageToken) pairs
    defined by the specified request.
    """
    repo = self.getDataRepository()
    dataset = repo.getDataset(request.dataset_id)
    return self._topLevelObjectGenerator(
        request,
        dataset.getNumContinuousSets(),
        dataset.getContinuousSetByIndex)
Returns a generator over the (rnaQuantificationSet, nextPageToken)
pairs defined by the specified request.
def rnaQuantificationSetsGenerator(self, request):
    """
    Returns a generator over the (rnaQuantificationSet, nextPageToken)
    pairs defined by the specified request.
    """
    repo = self.getDataRepository()
    dataset = repo.getDataset(request.dataset_id)
    return self._topLevelObjectGenerator(
        request,
        dataset.getNumRnaQuantificationSets(),
        dataset.getRnaQuantificationSetByIndex)
Returns a generator over the (rnaQuantification, nextPageToken) pairs
defined by the specified request.
def rnaQuantificationsGenerator(self, request):
    """
    Returns a generator over the (rnaQuantification, nextPageToken) pairs
    defined by the specified request. Quantifications are filtered by
    the request's biosample_id field when it is set.

    :raises exceptions.BadRequestException: if no rna quantification
        set id is specified.
    """
    if len(request.rna_quantification_set_id) < 1:
        raise exceptions.BadRequestException(
            "Rna Quantification Set Id must be specified")
    compoundId = datamodel.RnaQuantificationSetCompoundId.parse(
        request.rna_quantification_set_id)
    dataset = self.getDataRepository().getDataset(
        compoundId.dataset_id)
    rnaQuantSet = dataset.getRnaQuantificationSet(
        compoundId.rna_quantification_set_id)
    matches = [
        rnaQuant for rnaQuant in rnaQuantSet.getRnaQuantifications()
        if not request.biosample_id or
        request.biosample_id == rnaQuant.getBiosampleId()]
    return self._objectListGenerator(request, matches)
Returns a generator over the (expressionLevel, nextPageToken) pairs
defined by the specified request.
Currently only supports searching over a specified rnaQuantification
def expressionLevelsGenerator(self, request):
    """
    Returns a generator over the (expressionLevel, nextPageToken) pairs
    defined by the specified request.
    Currently only supports searching over a specified rnaQuantification
    """
    compoundId = datamodel.RnaQuantificationCompoundId.parse(
        request.rna_quantification_id)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    rnaQuantSet = dataset.getRnaQuantificationSet(
        compoundId.rna_quantification_set_id)
    rnaQuant = rnaQuantSet.getRnaQuantification(
        request.rna_quantification_id)
    # The previous version re-assigned rnaQuantificationId to
    # rnaQuant.getLocalId() and then never used it — a dead store,
    # removed here.
    return paging.ExpressionLevelsIterator(request, rnaQuant)
Runs a get request by converting the specified datamodel
object into its protocol representation.
def runGetRequest(self, obj):
    """
    Runs a get request by converting the specified datamodel
    object into its protocol representation.

    :return: the object's protocol representation as a JSON string.
    """
    protocolElement = obj.toProtocolElement()
    return protocol.toJson(protocolElement)
Runs the specified request. The request is a string containing
a JSON representation of an instance of the specified requestClass.
We return a string representation of an instance of the specified
responseClass in JSON format. Objects are filled into the page list
using the specified object generator, which must return
(object, nextPageToken) pairs, and be able to resume iteration from
any point using the nextPageToken attribute of the request object.
def runSearchRequest(
        self, requestStr, requestClass, responseClass, objectGenerator):
    """
    Runs the specified request. The request is a string containing
    a JSON representation of an instance of the specified requestClass.
    We return a string representation of an instance of the specified
    responseClass in JSON format. Objects are filled into the page list
    using the specified object generator, which must return
    (object, nextPageToken) pairs, and be able to resume iteration from
    any point using the nextPageToken attribute of the request object.

    :raises exceptions.InvalidJsonException: if requestStr cannot be
        parsed as requestClass.
    :raises exceptions.BadPageSizeException: if the requested page size
        is negative.
    """
    self.startProfile()
    try:
        request = protocol.fromJson(requestStr, requestClass)
    except protocol.json_format.ParseError:
        raise exceptions.InvalidJsonException(requestStr)
    # TODO How do we detect when the page size is not set?
    if not request.page_size:
        request.page_size = self._defaultPageSize
    if request.page_size < 0:
        raise exceptions.BadPageSizeException(request.page_size)
    builder = response_builder.SearchResponseBuilder(
        responseClass, request.page_size, self._maxResponseLength)
    # The token from the last yielded pair becomes the response's next
    # page token; None (empty generator or exhausted) means last page.
    pageToken = None
    for value, pageToken in objectGenerator(request):
        builder.addValue(value)
        if builder.isFull():
            break
    builder.setNextPageToken(pageToken)
    serialized = builder.getSerializedResponse()
    self.endProfile()
    return serialized
Runs a listReferenceBases request for the specified ID and
request arguments.
def runListReferenceBases(self, requestJson):
    """
    Runs a listReferenceBases request for the specified ID and
    request arguments.

    :param requestJson: JSON string encoding a ListReferenceBasesRequest;
        may be empty, in which case a default request is used.
    :return: a JSON string encoding a ListReferenceBasesResponse.
    :raises exceptions.InvalidJsonException: if requestJson is non-empty
        but cannot be parsed.
    """
    # In the case when an empty post request is made to the endpoint
    # we instantiate an empty ListReferenceBasesRequest.
    if not requestJson:
        request = protocol.ListReferenceBasesRequest()
    else:
        try:
            request = protocol.fromJson(
                requestJson,
                protocol.ListReferenceBasesRequest)
        except protocol.json_format.ParseError:
            raise exceptions.InvalidJsonException(requestJson)
    compoundId = datamodel.ReferenceCompoundId.parse(request.reference_id)
    referenceSet = self.getDataRepository().getReferenceSet(
        compoundId.reference_set_id)
    reference = referenceSet.getReference(request.reference_id)
    start = request.start
    end = request.end
    if end == 0:  # assume meant "get all"
        end = reference.getLength()
    if request.page_token:
        pageTokenStr = request.page_token
        # Resume from the offset recorded in the page token.
        start = paging._parsePageToken(pageTokenStr, 1)[0]
    # Pages are capped at the server's maximum response length; when
    # the remaining range exceeds a chunk, truncate and emit a token.
    chunkSize = self._maxResponseLength
    nextPageToken = None
    if start + chunkSize < end:
        end = start + chunkSize
        nextPageToken = str(start + chunkSize)
    sequence = reference.getBases(start, end)
    # build response
    response = protocol.ListReferenceBasesResponse()
    response.offset = start
    response.sequence = sequence
    if nextPageToken:
        response.next_page_token = nextPageToken
    return protocol.toJson(response)
Returns a callset with the given id
def runGetCallSet(self, id_):
    """
    Returns a callset with the given id

    :param id_: compound call set id string.
    """
    compoundId = datamodel.CallSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    variantSet = repo.getDataset(
        compoundId.dataset_id).getVariantSet(compoundId.variant_set_id)
    return self.runGetRequest(variantSet.getCallSet(id_))
Returns information about the service including protocol version.
def runGetInfo(self, request):
    """
    Returns information about the service including protocol version.
    """
    response = protocol.GetInfoResponse(
        protocol_version=protocol.version)
    return protocol.toJson(response)
Takes a flask request from the frontend and attempts to parse
into an AnnouncePeerRequest. If successful, it will log the
announcement to the `announcement` table with some other metadata
gathered from the request.
def runAddAnnouncement(self, flaskrequest):
    """
    Takes a flask request from the frontend and attempts to parse
    into an AnnouncePeerRequest. If successful, it will log the
    announcement to the `announcement` table with some other metadata
    gathered from the request.

    :param flaskrequest: a flask request object (or, in tests, a raw
        JSON string containing an AnnouncePeerRequest)
    :return: JSON string of an AnnouncePeerResponse with success=True
    :raises InvalidJsonException: if the payload cannot be parsed
    :raises BadRequestException: if the announcement cannot be stored
    """
    announcement = {}
    # We want to parse the request ourselves to collect a little more
    # data about it (hostname, remote address, user agent).
    try:
        requestData = protocol.fromJson(
            flaskrequest.get_data(), protocol.AnnouncePeerRequest)
        announcement['hostname'] = flaskrequest.host_url
        announcement['remote_addr'] = flaskrequest.remote_addr
        announcement['user_agent'] = flaskrequest.headers.get('User-Agent')
    except AttributeError:
        # Sometimes in testing we will send protocol requests instead
        # of flask requests and so the hostname and user agent won't
        # be present.
        try:
            requestData = protocol.fromJson(
                flaskrequest, protocol.AnnouncePeerRequest)
        except Exception as e:
            raise exceptions.InvalidJsonException(e)
    except Exception as e:
        raise exceptions.InvalidJsonException(e)
    # Validate the url before accepting the announcement
    peer = datamodel.peers.Peer(requestData.peer.url)
    peer.setAttributesJson(protocol.toJson(
        requestData.peer.attributes))
    announcement['url'] = peer.getUrl()
    announcement['attributes'] = peer.getAttributes()
    try:
        self.getDataRepository().insertAnnouncement(announcement)
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        raise exceptions.BadRequestException(announcement['url'])
    return protocol.toJson(
        protocol.AnnouncePeerResponse(success=True))
Takes a ListPeersRequest and returns a ListPeersResponse using
a page_token and page_size if provided.
def runListPeers(self, request):
    """
    Process a ListPeersRequest (honouring page_token and page_size if
    provided) and return the serialized ListPeersResponse.
    """
    requestCls = protocol.ListPeersRequest
    responseCls = protocol.ListPeersResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.peersGenerator)
Returns a variant with the given id
def runGetVariant(self, id_):
    """
    Return the variant with the given id as a JSON string.
    """
    compoundId = datamodel.VariantCompoundId.parse(id_)
    repo = self.getDataRepository()
    dataset = repo.getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(compoundId.variant_set_id)
    # TODO variant is a special case here, as it's returning a
    # protocol element rather than a datamodel object. We should
    # fix this for consistency.
    return protocol.toJson(variantSet.getVariant(compoundId))
Runs a getBiosample request for the specified ID.
def runGetBiosample(self, id_):
    """
    Fetch the biosample identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.BiosampleCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    return self.runGetRequest(parentDataset.getBiosample(id_))
Runs a getIndividual request for the specified ID.
def runGetIndividual(self, id_):
    """
    Runs a getIndividual request for the specified ID.
    """
    # Fix: the original parsed the id with BiosampleCompoundId — an
    # apparent copy-paste from runGetBiosample.  Individual ids should
    # be parsed with IndividualCompoundId so the id type is validated.
    compoundId = datamodel.IndividualCompoundId.parse(id_)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    individual = dataset.getIndividual(id_)
    return self.runGetRequest(individual)
Returns JSON string of the feature object corresponding to
the feature compoundID passed in.
def runGetFeature(self, id_):
    """
    Return the JSON string of the feature object corresponding to the
    feature compound id ``id_``.
    """
    compoundId = datamodel.FeatureCompoundId.parse(id_)
    repo = self.getDataRepository()
    dataset = repo.getDataset(compoundId.dataset_id)
    featureSet = dataset.getFeatureSet(compoundId.feature_set_id)
    return protocol.toJson(featureSet.getFeature(compoundId))
Returns a readGroupSet with the given id_
def runGetReadGroupSet(self, id_):
    """
    Fetch the read group set identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.ReadGroupSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    return self.runGetRequest(parentDataset.getReadGroupSet(id_))
Returns a read group with the given id_
def runGetReadGroup(self, id_):
    """
    Fetch the read group identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.ReadGroupCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    readGroupSet = parentDataset.getReadGroupSet(
        compoundId.read_group_set_id)
    return self.runGetRequest(readGroupSet.getReadGroup(id_))
Runs a getReference request for the specified ID.
def runGetReference(self, id_):
    """
    Fetch the reference identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.ReferenceCompoundId.parse(id_)
    repo = self.getDataRepository()
    referenceSet = repo.getReferenceSet(compoundId.reference_set_id)
    return self.runGetRequest(referenceSet.getReference(id_))
Runs a getReferenceSet request for the specified ID.
def runGetReferenceSet(self, id_):
    """
    Fetch the reference set identified by ``id_`` and return its JSON.
    """
    repo = self.getDataRepository()
    return self.runGetRequest(repo.getReferenceSet(id_))
Runs a getVariantSet request for the specified ID.
def runGetVariantSet(self, id_):
    """
    Fetch the variant set identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.VariantSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    return self.runGetRequest(parentDataset.getVariantSet(id_))
Runs a getFeatureSet request for the specified ID.
def runGetFeatureSet(self, id_):
    """
    Fetch the feature set identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.FeatureSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    return self.runGetRequest(parentDataset.getFeatureSet(id_))
Runs a getContinuousSet request for the specified ID.
def runGetContinuousSet(self, id_):
    """
    Fetch the continuous set identified by ``id_`` and return its JSON.
    """
    compoundId = datamodel.ContinuousSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    return self.runGetRequest(parentDataset.getContinuousSet(id_))
Runs a getDataset request for the specified ID.
def runGetDataset(self, id_):
    """
    Fetch the dataset identified by ``id_`` and return its JSON.
    """
    repo = self.getDataRepository()
    return self.runGetRequest(repo.getDataset(id_))
Runs a getVariantSet request for the specified ID.
def runGetVariantAnnotationSet(self, id_):
    """
    Runs a getVariantAnnotationSet request for the specified ID.
    """
    compoundId = datamodel.VariantAnnotationSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    dataset = repo.getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(compoundId.variant_set_id)
    return self.runGetRequest(variantSet.getVariantAnnotationSet(id_))
Runs a getRnaQuantification request for the specified ID.
def runGetRnaQuantification(self, id_):
    """
    Fetch the RNA quantification identified by ``id_`` and return its
    JSON.
    """
    compoundId = datamodel.RnaQuantificationCompoundId.parse(id_)
    repo = self.getDataRepository()
    dataset = repo.getDataset(compoundId.dataset_id)
    quantSet = dataset.getRnaQuantificationSet(
        compoundId.rna_quantification_set_id)
    return self.runGetRequest(quantSet.getRnaQuantification(id_))
Runs a getRnaQuantificationSet request for the specified ID.
def runGetRnaQuantificationSet(self, id_):
    """
    Fetch the RNA quantification set identified by ``id_`` and return
    its JSON.
    """
    compoundId = datamodel.RnaQuantificationSetCompoundId.parse(id_)
    repo = self.getDataRepository()
    parentDataset = repo.getDataset(compoundId.dataset_id)
    return self.runGetRequest(parentDataset.getRnaQuantificationSet(id_))
Runs a getExpressionLevel request for the specified ID.
def runGetExpressionLevel(self, id_):
    """
    Fetch the expression level identified by ``id_`` and return its
    JSON.
    """
    compoundId = datamodel.ExpressionLevelCompoundId.parse(id_)
    repo = self.getDataRepository()
    dataset = repo.getDataset(compoundId.dataset_id)
    quantSet = dataset.getRnaQuantificationSet(
        compoundId.rna_quantification_set_id)
    rnaQuant = quantSet.getRnaQuantification(
        compoundId.rna_quantification_id)
    return self.runGetRequest(rnaQuant.getExpressionLevel(compoundId))
Runs the specified SearchReadGroupSetsRequest.
def runSearchReadGroupSets(self, request):
    """
    Process a SearchReadGroupSetsRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchReadGroupSetsRequest
    responseCls = protocol.SearchReadGroupSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.readGroupSetsGenerator)
Runs the specified search SearchIndividualsRequest.
def runSearchIndividuals(self, request):
    """
    Process a SearchIndividualsRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchIndividualsRequest
    responseCls = protocol.SearchIndividualsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.individualsGenerator)
Runs the specified SearchBiosamplesRequest.
def runSearchBiosamples(self, request):
    """
    Process a SearchBiosamplesRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchBiosamplesRequest
    responseCls = protocol.SearchBiosamplesResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.biosamplesGenerator)
Runs the specified SearchReadsRequest.
def runSearchReads(self, request):
    """
    Process a SearchReadsRequest and return the serialized response.
    """
    requestCls = protocol.SearchReadsRequest
    responseCls = protocol.SearchReadsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.readsGenerator)
Runs the specified SearchReferenceSetsRequest.
def runSearchReferenceSets(self, request):
    """
    Process a SearchReferenceSetsRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchReferenceSetsRequest
    responseCls = protocol.SearchReferenceSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.referenceSetsGenerator)
Runs the specified SearchReferenceRequest.
def runSearchReferences(self, request):
    """
    Process a SearchReferencesRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchReferencesRequest
    responseCls = protocol.SearchReferencesResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.referencesGenerator)
Runs the specified SearchVariantSetsRequest.
def runSearchVariantSets(self, request):
    """
    Process a SearchVariantSetsRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchVariantSetsRequest
    responseCls = protocol.SearchVariantSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.variantSetsGenerator)
Runs the specified SearchVariantAnnotationSetsRequest.
def runSearchVariantAnnotationSets(self, request):
    """
    Process a SearchVariantAnnotationSetsRequest and return the
    serialized response.
    """
    requestCls = protocol.SearchVariantAnnotationSetsRequest
    responseCls = protocol.SearchVariantAnnotationSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls,
        self.variantAnnotationSetsGenerator)
Runs the specified SearchVariantRequest.
def runSearchVariants(self, request):
    """
    Process a SearchVariantsRequest and return the serialized response.
    """
    requestCls = protocol.SearchVariantsRequest
    responseCls = protocol.SearchVariantsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.variantsGenerator)
Runs the specified SearchVariantAnnotationsRequest.
def runSearchVariantAnnotations(self, request):
    """
    Process a SearchVariantAnnotationsRequest and return the serialized
    response.
    """
    requestCls = protocol.SearchVariantAnnotationsRequest
    responseCls = protocol.SearchVariantAnnotationsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls,
        self.variantAnnotationsGenerator)
Runs the specified SearchCallSetsRequest.
def runSearchCallSets(self, request):
    """
    Process a SearchCallSetsRequest and return the serialized response.
    """
    requestCls = protocol.SearchCallSetsRequest
    responseCls = protocol.SearchCallSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.callSetsGenerator)
Runs the specified SearchDatasetsRequest.
def runSearchDatasets(self, request):
    """
    Process a SearchDatasetsRequest and return the serialized response.
    """
    requestCls = protocol.SearchDatasetsRequest
    responseCls = protocol.SearchDatasetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.datasetsGenerator)
Returns a SearchFeatureSetsResponse for the specified
SearchFeatureSetsRequest object.
def runSearchFeatureSets(self, request):
    """
    Return a serialized SearchFeatureSetsResponse for the specified
    SearchFeatureSetsRequest object.
    """
    requestCls = protocol.SearchFeatureSetsRequest
    responseCls = protocol.SearchFeatureSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.featureSetsGenerator)
Returns a SearchFeaturesResponse for the specified
SearchFeaturesRequest object.
:param request: JSON string representing searchFeaturesRequest
:return: JSON string representing searchFeatureResponse
def runSearchFeatures(self, request):
    """
    Return a SearchFeaturesResponse for the specified
    SearchFeaturesRequest object.

    :param request: JSON string representing searchFeaturesRequest
    :return: JSON string representing searchFeatureResponse
    """
    requestCls = protocol.SearchFeaturesRequest
    responseCls = protocol.SearchFeaturesResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.featuresGenerator)
Returns a SearchContinuousSetsResponse for the specified
SearchContinuousSetsRequest object.
def runSearchContinuousSets(self, request):
    """
    Return a serialized SearchContinuousSetsResponse for the specified
    SearchContinuousSetsRequest object.
    """
    requestCls = protocol.SearchContinuousSetsRequest
    responseCls = protocol.SearchContinuousSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.continuousSetsGenerator)
Returns a SearchContinuousResponse for the specified
SearchContinuousRequest object.
:param request: JSON string representing searchContinuousRequest
:return: JSON string representing searchContinuousResponse
def runSearchContinuous(self, request):
    """
    Return a SearchContinuousResponse for the specified
    SearchContinuousRequest object.

    :param request: JSON string representing searchContinuousRequest
    :return: JSON string representing searchContinuousResponse
    """
    requestCls = protocol.SearchContinuousRequest
    responseCls = protocol.SearchContinuousResponse
    return self.runSearchRequest(
        request, requestCls, responseCls, self.continuousGenerator)
Returns a SearchRnaQuantificationSetsResponse for the specified
SearchRnaQuantificationSetsRequest object.
def runSearchRnaQuantificationSets(self, request):
    """
    Return a serialized SearchRnaQuantificationSetsResponse for the
    specified SearchRnaQuantificationSetsRequest object.
    """
    requestCls = protocol.SearchRnaQuantificationSetsRequest
    responseCls = protocol.SearchRnaQuantificationSetsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls,
        self.rnaQuantificationSetsGenerator)
Returns a SearchRnaQuantificationResponse for the specified
SearchRnaQuantificationRequest object.
def runSearchRnaQuantifications(self, request):
    """
    Return a serialized SearchRnaQuantificationsResponse for the
    specified SearchRnaQuantificationsRequest object.
    """
    requestCls = protocol.SearchRnaQuantificationsRequest
    responseCls = protocol.SearchRnaQuantificationsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls,
        self.rnaQuantificationsGenerator)
Returns a SearchExpressionLevelResponse for the specified
SearchExpressionLevelRequest object.
def runSearchExpressionLevels(self, request):
    """
    Return a serialized SearchExpressionLevelsResponse for the
    specified SearchExpressionLevelsRequest object.
    """
    requestCls = protocol.SearchExpressionLevelsRequest
    responseCls = protocol.SearchExpressionLevelsResponse
    return self.runSearchRequest(
        request, requestCls, responseCls,
        self.expressionLevelsGenerator)
Populates the instance variables of this Dataset from the
specified database row.
def populateFromRow(self, dataset):
    """
    Populates the instance variables of this Dataset from the
    specified database row.
    """
    # ``dataset`` is a row object exposing ``description`` and
    # ``attributes`` — presumably the repo's dataset table row; confirm
    # against the repository loader.
    self._description = dataset.description
    self.setAttributesJson(dataset.attributes)
Adds the specified variantSet to this dataset.
def addVariantSet(self, variantSet):
    """
    Register ``variantSet`` in this dataset under its id and local
    name.
    """
    key = variantSet.getId()
    self._variantSetIds.append(key)
    self._variantSetIdMap[key] = variantSet
    self._variantSetNameMap[variantSet.getLocalId()] = variantSet
Adds the specified biosample to this dataset.
def addBiosample(self, biosample):
    """
    Register ``biosample`` in this dataset under its id and name.
    """
    key = biosample.getId()
    self._biosampleIds.append(key)
    self._biosampleIdMap[key] = biosample
    self._biosampleNameMap[biosample.getName()] = biosample
Adds the specified individual to this dataset.
def addIndividual(self, individual):
    """
    Register ``individual`` in this dataset under its id and name.
    """
    key = individual.getId()
    self._individualIds.append(key)
    self._individualIdMap[key] = individual
    self._individualNameMap[individual.getName()] = individual
Adds the specified featureSet to this dataset.
def addFeatureSet(self, featureSet):
    """
    Register ``featureSet`` in this dataset under its id and local
    name.
    """
    key = featureSet.getId()
    self._featureSetIds.append(key)
    self._featureSetIdMap[key] = featureSet
    self._featureSetNameMap[featureSet.getLocalId()] = featureSet
Adds the specified continuousSet to this dataset.
def addContinuousSet(self, continuousSet):
    """
    Register ``continuousSet`` in this dataset under its id and local
    name.
    """
    key = continuousSet.getId()
    self._continuousSetIds.append(key)
    self._continuousSetIdMap[key] = continuousSet
    self._continuousSetNameMap[continuousSet.getLocalId()] = continuousSet
Adds the specified readGroupSet to this dataset.
def addReadGroupSet(self, readGroupSet):
    """
    Register ``readGroupSet`` in this dataset under its id and local
    name.
    """
    key = readGroupSet.getId()
    self._readGroupSetIds.append(key)
    self._readGroupSetIdMap[key] = readGroupSet
    self._readGroupSetNameMap[readGroupSet.getLocalId()] = readGroupSet
Adds the specified rnaQuantification set to this dataset.
def addRnaQuantificationSet(self, rnaQuantSet):
    """
    Register the RNA quantification set in this dataset under its id
    and local name.
    """
    key = rnaQuantSet.getId()
    self._rnaQuantificationSetIds.append(key)
    self._rnaQuantificationSetIdMap[key] = rnaQuantSet
    self._rnaQuantificationSetNameMap[rnaQuantSet.getLocalId()] = rnaQuantSet
Returns the VariantSet with the specified name, or raises a
VariantSetNotFoundException otherwise.
def getVariantSet(self, id_):
    """
    Return the VariantSet with the given id, or raise
    VariantSetNotFoundException if it is unknown.
    """
    try:
        return self._variantSetIdMap[id_]
    except KeyError:
        raise exceptions.VariantSetNotFoundException(id_)
Returns a VariantSet with the specified name, or raises a
VariantSetNameNotFoundException if it does not exist.
def getVariantSetByName(self, name):
    """
    Return the VariantSet with the given local name, or raise
    VariantSetNameNotFoundException if it does not exist.
    """
    try:
        return self._variantSetNameMap[name]
    except KeyError:
        raise exceptions.VariantSetNameNotFoundException(name)
Adds the specified g2p association set to this backend.
def addPhenotypeAssociationSet(self, phenotypeAssociationSet):
    """
    Register the given g2p (genotype-to-phenotype) association set
    under its id and local name.
    """
    key = phenotypeAssociationSet.getId()
    self._phenotypeAssociationSetIds.append(key)
    self._phenotypeAssociationSetIdMap[key] = phenotypeAssociationSet
    localName = phenotypeAssociationSet.getLocalId()
    self._phenotypeAssociationSetNameMap[localName] = phenotypeAssociationSet
Returns the FeatureSet with the specified id, or raises a
FeatureSetNotFoundException otherwise.
def getFeatureSet(self, id_):
    """
    Return the FeatureSet with the given id, or raise
    FeatureSetNotFoundException if it is unknown.
    """
    try:
        return self._featureSetIdMap[id_]
    except KeyError:
        raise exceptions.FeatureSetNotFoundException(id_)
Returns the FeatureSet with the specified name, or raises
an exception otherwise.
def getFeatureSetByName(self, name):
    """
    Return the FeatureSet with the given local name, or raise
    FeatureSetNameNotFoundException if it is unknown.
    """
    try:
        return self._featureSetNameMap[name]
    except KeyError:
        raise exceptions.FeatureSetNameNotFoundException(name)
Returns the ContinuousSet with the specified id, or raises a
ContinuousSetNotFoundException otherwise.
def getContinuousSet(self, id_):
    """
    Return the ContinuousSet with the given id, or raise
    ContinuousSetNotFoundException if it is unknown.
    """
    try:
        return self._continuousSetIdMap[id_]
    except KeyError:
        raise exceptions.ContinuousSetNotFoundException(id_)
Returns the ContinuousSet with the specified name, or raises
an exception otherwise.
def getContinuousSetByName(self, name):
    """
    Return the ContinuousSet with the given local name, or raise
    ContinuousSetNameNotFoundException if it is unknown.
    """
    try:
        return self._continuousSetNameMap[name]
    except KeyError:
        raise exceptions.ContinuousSetNameNotFoundException(name)
Returns a Biosample with the specified name, or raises a
BiosampleNameNotFoundException if it does not exist.
def getBiosampleByName(self, name):
    """
    Return the Biosample with the given name, or raise
    BiosampleNameNotFoundException if it does not exist.
    """
    try:
        return self._biosampleNameMap[name]
    except KeyError:
        raise exceptions.BiosampleNameNotFoundException(name)
Returns the Biosample with the specified id, or raises
a BiosampleNotFoundException otherwise.
def getBiosample(self, id_):
    """
    Return the Biosample with the given id, or raise
    BiosampleNotFoundException if it is unknown.
    """
    try:
        return self._biosampleIdMap[id_]
    except KeyError:
        raise exceptions.BiosampleNotFoundException(id_)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.