repo stringlengths 7 55 | path stringlengths 4 223 | func_name stringlengths 1 134 | original_string stringlengths 75 104k | language stringclasses 1 value | code stringlengths 75 104k | code_tokens listlengths 19 28.4k | docstring stringlengths 1 46.9k | docstring_tokens listlengths 1 1.97k | sha stringlengths 40 40 | url stringlengths 87 315 | partition stringclasses 1 value |
|---|---|---|---|---|---|---|---|---|---|---|---|
ga4gh/ga4gh-server | ga4gh/server/gff3.py | Gff3Parser._parseAttrs | def _parseAttrs(self, attrsStr):
"""
Parse the attributes and values
"""
attributes = dict()
for attrStr in self.SPLIT_ATTR_COL_RE.split(attrsStr):
name, vals = self._parseAttrVal(attrStr)
if name in attributes:
raise GFF3Exception(
"duplicated attribute name: {}".format(name),
self.fileName, self.lineNumber)
attributes[name] = vals
return attributes | python | def _parseAttrs(self, attrsStr):
"""
Parse the attributes and values
"""
attributes = dict()
for attrStr in self.SPLIT_ATTR_COL_RE.split(attrsStr):
name, vals = self._parseAttrVal(attrStr)
if name in attributes:
raise GFF3Exception(
"duplicated attribute name: {}".format(name),
self.fileName, self.lineNumber)
attributes[name] = vals
return attributes | [
"def",
"_parseAttrs",
"(",
"self",
",",
"attrsStr",
")",
":",
"attributes",
"=",
"dict",
"(",
")",
"for",
"attrStr",
"in",
"self",
".",
"SPLIT_ATTR_COL_RE",
".",
"split",
"(",
"attrsStr",
")",
":",
"name",
",",
"vals",
"=",
"self",
".",
"_parseAttrVal",
"(",
"attrStr",
")",
"if",
"name",
"in",
"attributes",
":",
"raise",
"GFF3Exception",
"(",
"\"duplicated attribute name: {}\"",
".",
"format",
"(",
"name",
")",
",",
"self",
".",
"fileName",
",",
"self",
".",
"lineNumber",
")",
"attributes",
"[",
"name",
"]",
"=",
"vals",
"return",
"attributes"
] | Parse the attributes and values | [
"Parse",
"the",
"attributes",
"and",
"values"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L272-L284 | train |
ga4gh/ga4gh-server | ga4gh/server/gff3.py | Gff3Parser._parseRecord | def _parseRecord(self, gff3Set, line):
"""
Parse one record.
"""
row = line.split("\t")
if len(row) != self.GFF3_NUM_COLS:
raise GFF3Exception(
"Wrong number of columns, expected {}, got {}".format(
self.GFF3_NUM_COLS, len(row)),
self.fileName, self.lineNumber)
feature = Feature(
urllib.unquote(row[0]),
urllib.unquote(row[1]),
urllib.unquote(row[2]),
int(row[3]), int(row[4]),
row[5], row[6], row[7],
self._parseAttrs(row[8]))
gff3Set.add(feature) | python | def _parseRecord(self, gff3Set, line):
"""
Parse one record.
"""
row = line.split("\t")
if len(row) != self.GFF3_NUM_COLS:
raise GFF3Exception(
"Wrong number of columns, expected {}, got {}".format(
self.GFF3_NUM_COLS, len(row)),
self.fileName, self.lineNumber)
feature = Feature(
urllib.unquote(row[0]),
urllib.unquote(row[1]),
urllib.unquote(row[2]),
int(row[3]), int(row[4]),
row[5], row[6], row[7],
self._parseAttrs(row[8]))
gff3Set.add(feature) | [
"def",
"_parseRecord",
"(",
"self",
",",
"gff3Set",
",",
"line",
")",
":",
"row",
"=",
"line",
".",
"split",
"(",
"\"\\t\"",
")",
"if",
"len",
"(",
"row",
")",
"!=",
"self",
".",
"GFF3_NUM_COLS",
":",
"raise",
"GFF3Exception",
"(",
"\"Wrong number of columns, expected {}, got {}\"",
".",
"format",
"(",
"self",
".",
"GFF3_NUM_COLS",
",",
"len",
"(",
"row",
")",
")",
",",
"self",
".",
"fileName",
",",
"self",
".",
"lineNumber",
")",
"feature",
"=",
"Feature",
"(",
"urllib",
".",
"unquote",
"(",
"row",
"[",
"0",
"]",
")",
",",
"urllib",
".",
"unquote",
"(",
"row",
"[",
"1",
"]",
")",
",",
"urllib",
".",
"unquote",
"(",
"row",
"[",
"2",
"]",
")",
",",
"int",
"(",
"row",
"[",
"3",
"]",
")",
",",
"int",
"(",
"row",
"[",
"4",
"]",
")",
",",
"row",
"[",
"5",
"]",
",",
"row",
"[",
"6",
"]",
",",
"row",
"[",
"7",
"]",
",",
"self",
".",
"_parseAttrs",
"(",
"row",
"[",
"8",
"]",
")",
")",
"gff3Set",
".",
"add",
"(",
"feature",
")"
] | Parse one record. | [
"Parse",
"one",
"record",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L288-L305 | train |
ga4gh/ga4gh-server | ga4gh/server/gff3.py | Gff3Parser.parse | def parse(self):
"""
Run the parse and return the resulting Gff3Set object.
"""
fh = self._open()
try:
gff3Set = Gff3Set(self.fileName)
for line in fh:
self.lineNumber += 1
self._parseLine(gff3Set, line[0:-1])
finally:
fh.close()
gff3Set.linkChildFeaturesToParents()
return gff3Set | python | def parse(self):
"""
Run the parse and return the resulting Gff3Set object.
"""
fh = self._open()
try:
gff3Set = Gff3Set(self.fileName)
for line in fh:
self.lineNumber += 1
self._parseLine(gff3Set, line[0:-1])
finally:
fh.close()
gff3Set.linkChildFeaturesToParents()
return gff3Set | [
"def",
"parse",
"(",
"self",
")",
":",
"fh",
"=",
"self",
".",
"_open",
"(",
")",
"try",
":",
"gff3Set",
"=",
"Gff3Set",
"(",
"self",
".",
"fileName",
")",
"for",
"line",
"in",
"fh",
":",
"self",
".",
"lineNumber",
"+=",
"1",
"self",
".",
"_parseLine",
"(",
"gff3Set",
",",
"line",
"[",
"0",
":",
"-",
"1",
"]",
")",
"finally",
":",
"fh",
".",
"close",
"(",
")",
"gff3Set",
".",
"linkChildFeaturesToParents",
"(",
")",
"return",
"gff3Set"
] | Run the parse and return the resulting Gff3Set object. | [
"Run",
"the",
"parse",
"and",
"return",
"the",
"resulting",
"Gff3Set",
"object",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L327-L340 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.addDataset | def addDataset(self, dataset):
"""
Adds the specified dataset to this data repository.
"""
id_ = dataset.getId()
self._datasetIdMap[id_] = dataset
self._datasetNameMap[dataset.getLocalId()] = dataset
self._datasetIds.append(id_) | python | def addDataset(self, dataset):
"""
Adds the specified dataset to this data repository.
"""
id_ = dataset.getId()
self._datasetIdMap[id_] = dataset
self._datasetNameMap[dataset.getLocalId()] = dataset
self._datasetIds.append(id_) | [
"def",
"addDataset",
"(",
"self",
",",
"dataset",
")",
":",
"id_",
"=",
"dataset",
".",
"getId",
"(",
")",
"self",
".",
"_datasetIdMap",
"[",
"id_",
"]",
"=",
"dataset",
"self",
".",
"_datasetNameMap",
"[",
"dataset",
".",
"getLocalId",
"(",
")",
"]",
"=",
"dataset",
"self",
".",
"_datasetIds",
".",
"append",
"(",
"id_",
")"
] | Adds the specified dataset to this data repository. | [
"Adds",
"the",
"specified",
"dataset",
"to",
"this",
"data",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L50-L57 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.addReferenceSet | def addReferenceSet(self, referenceSet):
"""
Adds the specified reference set to this data repository.
"""
id_ = referenceSet.getId()
self._referenceSetIdMap[id_] = referenceSet
self._referenceSetNameMap[referenceSet.getLocalId()] = referenceSet
self._referenceSetIds.append(id_) | python | def addReferenceSet(self, referenceSet):
"""
Adds the specified reference set to this data repository.
"""
id_ = referenceSet.getId()
self._referenceSetIdMap[id_] = referenceSet
self._referenceSetNameMap[referenceSet.getLocalId()] = referenceSet
self._referenceSetIds.append(id_) | [
"def",
"addReferenceSet",
"(",
"self",
",",
"referenceSet",
")",
":",
"id_",
"=",
"referenceSet",
".",
"getId",
"(",
")",
"self",
".",
"_referenceSetIdMap",
"[",
"id_",
"]",
"=",
"referenceSet",
"self",
".",
"_referenceSetNameMap",
"[",
"referenceSet",
".",
"getLocalId",
"(",
")",
"]",
"=",
"referenceSet",
"self",
".",
"_referenceSetIds",
".",
"append",
"(",
"id_",
")"
] | Adds the specified reference set to this data repository. | [
"Adds",
"the",
"specified",
"reference",
"set",
"to",
"this",
"data",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L59-L66 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.addOntology | def addOntology(self, ontology):
"""
Add an ontology map to this data repository.
"""
self._ontologyNameMap[ontology.getName()] = ontology
self._ontologyIdMap[ontology.getId()] = ontology
self._ontologyIds.append(ontology.getId()) | python | def addOntology(self, ontology):
"""
Add an ontology map to this data repository.
"""
self._ontologyNameMap[ontology.getName()] = ontology
self._ontologyIdMap[ontology.getId()] = ontology
self._ontologyIds.append(ontology.getId()) | [
"def",
"addOntology",
"(",
"self",
",",
"ontology",
")",
":",
"self",
".",
"_ontologyNameMap",
"[",
"ontology",
".",
"getName",
"(",
")",
"]",
"=",
"ontology",
"self",
".",
"_ontologyIdMap",
"[",
"ontology",
".",
"getId",
"(",
")",
"]",
"=",
"ontology",
"self",
".",
"_ontologyIds",
".",
"append",
"(",
"ontology",
".",
"getId",
"(",
")",
")"
] | Add an ontology map to this data repository. | [
"Add",
"an",
"ontology",
"map",
"to",
"this",
"data",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L68-L74 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getPeer | def getPeer(self, url):
"""
Select the first peer in the datarepo with the given url simulating
the behavior of selecting by URL. This is only used during testing.
"""
peers = filter(lambda x: x.getUrl() == url, self.getPeers())
if len(peers) == 0:
raise exceptions.PeerNotFoundException(url)
return peers[0] | python | def getPeer(self, url):
"""
Select the first peer in the datarepo with the given url simulating
the behavior of selecting by URL. This is only used during testing.
"""
peers = filter(lambda x: x.getUrl() == url, self.getPeers())
if len(peers) == 0:
raise exceptions.PeerNotFoundException(url)
return peers[0] | [
"def",
"getPeer",
"(",
"self",
",",
"url",
")",
":",
"peers",
"=",
"filter",
"(",
"lambda",
"x",
":",
"x",
".",
"getUrl",
"(",
")",
"==",
"url",
",",
"self",
".",
"getPeers",
"(",
")",
")",
"if",
"len",
"(",
"peers",
")",
"==",
"0",
":",
"raise",
"exceptions",
".",
"PeerNotFoundException",
"(",
"url",
")",
"return",
"peers",
"[",
"0",
"]"
] | Select the first peer in the datarepo with the given url simulating
the behavior of selecting by URL. This is only used during testing. | [
"Select",
"the",
"first",
"peer",
"in",
"the",
"datarepo",
"with",
"the",
"given",
"url",
"simulating",
"the",
"behavior",
"of",
"selecting",
"by",
"URL",
".",
"This",
"is",
"only",
"used",
"during",
"testing",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L89-L97 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getDataset | def getDataset(self, id_):
"""
Returns a dataset with the specified ID, or raises a
DatasetNotFoundException if it does not exist.
"""
if id_ not in self._datasetIdMap:
raise exceptions.DatasetNotFoundException(id_)
return self._datasetIdMap[id_] | python | def getDataset(self, id_):
"""
Returns a dataset with the specified ID, or raises a
DatasetNotFoundException if it does not exist.
"""
if id_ not in self._datasetIdMap:
raise exceptions.DatasetNotFoundException(id_)
return self._datasetIdMap[id_] | [
"def",
"getDataset",
"(",
"self",
",",
"id_",
")",
":",
"if",
"id_",
"not",
"in",
"self",
".",
"_datasetIdMap",
":",
"raise",
"exceptions",
".",
"DatasetNotFoundException",
"(",
"id_",
")",
"return",
"self",
".",
"_datasetIdMap",
"[",
"id_",
"]"
] | Returns a dataset with the specified ID, or raises a
DatasetNotFoundException if it does not exist. | [
"Returns",
"a",
"dataset",
"with",
"the",
"specified",
"ID",
"or",
"raises",
"a",
"DatasetNotFoundException",
"if",
"it",
"does",
"not",
"exist",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L119-L126 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getDatasetByName | def getDatasetByName(self, name):
"""
Returns the dataset with the specified name.
"""
if name not in self._datasetNameMap:
raise exceptions.DatasetNameNotFoundException(name)
return self._datasetNameMap[name] | python | def getDatasetByName(self, name):
"""
Returns the dataset with the specified name.
"""
if name not in self._datasetNameMap:
raise exceptions.DatasetNameNotFoundException(name)
return self._datasetNameMap[name] | [
"def",
"getDatasetByName",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"_datasetNameMap",
":",
"raise",
"exceptions",
".",
"DatasetNameNotFoundException",
"(",
"name",
")",
"return",
"self",
".",
"_datasetNameMap",
"[",
"name",
"]"
] | Returns the dataset with the specified name. | [
"Returns",
"the",
"dataset",
"with",
"the",
"specified",
"name",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L134-L140 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getOntology | def getOntology(self, id_):
"""
Returns the ontology with the specified ID.
"""
if id_ not in self._ontologyIdMap:
raise exceptions.OntologyNotFoundException(id_)
return self._ontologyIdMap[id_] | python | def getOntology(self, id_):
"""
Returns the ontology with the specified ID.
"""
if id_ not in self._ontologyIdMap:
raise exceptions.OntologyNotFoundException(id_)
return self._ontologyIdMap[id_] | [
"def",
"getOntology",
"(",
"self",
",",
"id_",
")",
":",
"if",
"id_",
"not",
"in",
"self",
".",
"_ontologyIdMap",
":",
"raise",
"exceptions",
".",
"OntologyNotFoundException",
"(",
"id_",
")",
"return",
"self",
".",
"_ontologyIdMap",
"[",
"id_",
"]"
] | Returns the ontology with the specified ID. | [
"Returns",
"the",
"ontology",
"with",
"the",
"specified",
"ID",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L154-L160 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getOntologyByName | def getOntologyByName(self, name):
"""
Returns an ontology by name
"""
if name not in self._ontologyNameMap:
raise exceptions.OntologyNameNotFoundException(name)
return self._ontologyNameMap[name] | python | def getOntologyByName(self, name):
"""
Returns an ontology by name
"""
if name not in self._ontologyNameMap:
raise exceptions.OntologyNameNotFoundException(name)
return self._ontologyNameMap[name] | [
"def",
"getOntologyByName",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"_ontologyNameMap",
":",
"raise",
"exceptions",
".",
"OntologyNameNotFoundException",
"(",
"name",
")",
"return",
"self",
".",
"_ontologyNameMap",
"[",
"name",
"]"
] | Returns an ontology by name | [
"Returns",
"an",
"ontology",
"by",
"name"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L162-L168 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getReferenceSet | def getReferenceSet(self, id_):
"""
Retuns the ReferenceSet with the specified ID, or raises a
ReferenceSetNotFoundException if it does not exist.
"""
if id_ not in self._referenceSetIdMap:
raise exceptions.ReferenceSetNotFoundException(id_)
return self._referenceSetIdMap[id_] | python | def getReferenceSet(self, id_):
"""
Retuns the ReferenceSet with the specified ID, or raises a
ReferenceSetNotFoundException if it does not exist.
"""
if id_ not in self._referenceSetIdMap:
raise exceptions.ReferenceSetNotFoundException(id_)
return self._referenceSetIdMap[id_] | [
"def",
"getReferenceSet",
"(",
"self",
",",
"id_",
")",
":",
"if",
"id_",
"not",
"in",
"self",
".",
"_referenceSetIdMap",
":",
"raise",
"exceptions",
".",
"ReferenceSetNotFoundException",
"(",
"id_",
")",
"return",
"self",
".",
"_referenceSetIdMap",
"[",
"id_",
"]"
] | Retuns the ReferenceSet with the specified ID, or raises a
ReferenceSetNotFoundException if it does not exist. | [
"Retuns",
"the",
"ReferenceSet",
"with",
"the",
"specified",
"ID",
"or",
"raises",
"a",
"ReferenceSetNotFoundException",
"if",
"it",
"does",
"not",
"exist",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L176-L183 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getReferenceSetByName | def getReferenceSetByName(self, name):
"""
Returns the reference set with the specified name.
"""
if name not in self._referenceSetNameMap:
raise exceptions.ReferenceSetNameNotFoundException(name)
return self._referenceSetNameMap[name] | python | def getReferenceSetByName(self, name):
"""
Returns the reference set with the specified name.
"""
if name not in self._referenceSetNameMap:
raise exceptions.ReferenceSetNameNotFoundException(name)
return self._referenceSetNameMap[name] | [
"def",
"getReferenceSetByName",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"_referenceSetNameMap",
":",
"raise",
"exceptions",
".",
"ReferenceSetNameNotFoundException",
"(",
"name",
")",
"return",
"self",
".",
"_referenceSetNameMap",
"[",
"name",
"]"
] | Returns the reference set with the specified name. | [
"Returns",
"the",
"reference",
"set",
"with",
"the",
"specified",
"name",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L191-L197 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getReadGroupSet | def getReadGroupSet(self, id_):
"""
Returns the readgroup set with the specified ID.
"""
compoundId = datamodel.ReadGroupSetCompoundId.parse(id_)
dataset = self.getDataset(compoundId.dataset_id)
return dataset.getReadGroupSet(id_) | python | def getReadGroupSet(self, id_):
"""
Returns the readgroup set with the specified ID.
"""
compoundId = datamodel.ReadGroupSetCompoundId.parse(id_)
dataset = self.getDataset(compoundId.dataset_id)
return dataset.getReadGroupSet(id_) | [
"def",
"getReadGroupSet",
"(",
"self",
",",
"id_",
")",
":",
"compoundId",
"=",
"datamodel",
".",
"ReadGroupSetCompoundId",
".",
"parse",
"(",
"id_",
")",
"dataset",
"=",
"self",
".",
"getDataset",
"(",
"compoundId",
".",
"dataset_id",
")",
"return",
"dataset",
".",
"getReadGroupSet",
"(",
"id_",
")"
] | Returns the readgroup set with the specified ID. | [
"Returns",
"the",
"readgroup",
"set",
"with",
"the",
"specified",
"ID",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L199-L205 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.getVariantSet | def getVariantSet(self, id_):
"""
Returns the readgroup set with the specified ID.
"""
compoundId = datamodel.VariantSetCompoundId.parse(id_)
dataset = self.getDataset(compoundId.dataset_id)
return dataset.getVariantSet(id_) | python | def getVariantSet(self, id_):
"""
Returns the readgroup set with the specified ID.
"""
compoundId = datamodel.VariantSetCompoundId.parse(id_)
dataset = self.getDataset(compoundId.dataset_id)
return dataset.getVariantSet(id_) | [
"def",
"getVariantSet",
"(",
"self",
",",
"id_",
")",
":",
"compoundId",
"=",
"datamodel",
".",
"VariantSetCompoundId",
".",
"parse",
"(",
"id_",
")",
"dataset",
"=",
"self",
".",
"getDataset",
"(",
"compoundId",
".",
"dataset_id",
")",
"return",
"dataset",
".",
"getVariantSet",
"(",
"id_",
")"
] | Returns the readgroup set with the specified ID. | [
"Returns",
"the",
"readgroup",
"set",
"with",
"the",
"specified",
"ID",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L207-L213 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.printSummary | def printSummary(self):
"""
Prints a summary of this data repository to stdout.
"""
print("Ontologies:")
for ontology in self.getOntologys():
print(
"",
ontology.getOntologyPrefix(),
ontology.getName(),
ontology.getDataUrl(),
sep="\t")
print("ReferenceSets:")
for referenceSet in self.getReferenceSets():
print(
"", referenceSet.getLocalId(), referenceSet.getId(),
referenceSet.getDescription(), referenceSet.getDataUrl(),
sep="\t")
for reference in referenceSet.getReferences():
print(
"\t", reference.getLocalId(), reference.getId(),
sep="\t")
print("Datasets:")
for dataset in self.getDatasets():
print(
"", dataset.getLocalId(), dataset.getId(),
dataset.getDescription(), sep="\t")
print("\tReadGroupSets:")
for readGroupSet in dataset.getReadGroupSets():
print(
"\t", readGroupSet.getLocalId(),
readGroupSet.getReferenceSet().getLocalId(),
readGroupSet.getId(),
readGroupSet.getDataUrl(), sep="\t")
for readGroup in readGroupSet.getReadGroups():
print(
"\t\t", readGroup.getId(), readGroup.getLocalId(),
sep="\t")
print("\tVariantSets:")
for variantSet in dataset.getVariantSets():
print(
"\t", variantSet.getLocalId(),
variantSet.getReferenceSet().getLocalId(),
variantSet.getId(),
sep="\t")
if variantSet.getNumVariantAnnotationSets() > 0:
print("\t\tVariantAnnotationSets:")
for vas in variantSet.getVariantAnnotationSets():
print(
"\t\t", vas.getLocalId(),
vas.getAnnotationType(),
vas.getOntology().getName(), sep="\t")
print("\tFeatureSets:")
for featureSet in dataset.getFeatureSets():
print(
"\t", featureSet.getLocalId(),
featureSet.getReferenceSet().getLocalId(),
featureSet.getOntology().getName(),
featureSet.getId(),
sep="\t")
print("\tContinuousSets:")
for continuousSet in dataset.getContinuousSets():
print(
"\t", continuousSet.getLocalId(),
continuousSet.getReferenceSet().getLocalId(),
continuousSet.getId(),
sep="\t")
print("\tPhenotypeAssociationSets:")
for phenotypeAssociationSet in \
dataset.getPhenotypeAssociationSets():
print(
"\t", phenotypeAssociationSet.getLocalId(),
phenotypeAssociationSet.getParentContainer().getId(),
sep="\t")
# TODO - please improve this listing
print("\tRnaQuantificationSets:")
for rna_quantification_set in dataset.getRnaQuantificationSets():
print(
"\t", rna_quantification_set.getLocalId(),
rna_quantification_set.getId(), sep="\t")
for quant in rna_quantification_set.getRnaQuantifications():
print(
"\t\t", quant.getLocalId(),
quant._description,
",".join(quant._readGroupIds),
",".join(quant._featureSetIds), sep="\t") | python | def printSummary(self):
"""
Prints a summary of this data repository to stdout.
"""
print("Ontologies:")
for ontology in self.getOntologys():
print(
"",
ontology.getOntologyPrefix(),
ontology.getName(),
ontology.getDataUrl(),
sep="\t")
print("ReferenceSets:")
for referenceSet in self.getReferenceSets():
print(
"", referenceSet.getLocalId(), referenceSet.getId(),
referenceSet.getDescription(), referenceSet.getDataUrl(),
sep="\t")
for reference in referenceSet.getReferences():
print(
"\t", reference.getLocalId(), reference.getId(),
sep="\t")
print("Datasets:")
for dataset in self.getDatasets():
print(
"", dataset.getLocalId(), dataset.getId(),
dataset.getDescription(), sep="\t")
print("\tReadGroupSets:")
for readGroupSet in dataset.getReadGroupSets():
print(
"\t", readGroupSet.getLocalId(),
readGroupSet.getReferenceSet().getLocalId(),
readGroupSet.getId(),
readGroupSet.getDataUrl(), sep="\t")
for readGroup in readGroupSet.getReadGroups():
print(
"\t\t", readGroup.getId(), readGroup.getLocalId(),
sep="\t")
print("\tVariantSets:")
for variantSet in dataset.getVariantSets():
print(
"\t", variantSet.getLocalId(),
variantSet.getReferenceSet().getLocalId(),
variantSet.getId(),
sep="\t")
if variantSet.getNumVariantAnnotationSets() > 0:
print("\t\tVariantAnnotationSets:")
for vas in variantSet.getVariantAnnotationSets():
print(
"\t\t", vas.getLocalId(),
vas.getAnnotationType(),
vas.getOntology().getName(), sep="\t")
print("\tFeatureSets:")
for featureSet in dataset.getFeatureSets():
print(
"\t", featureSet.getLocalId(),
featureSet.getReferenceSet().getLocalId(),
featureSet.getOntology().getName(),
featureSet.getId(),
sep="\t")
print("\tContinuousSets:")
for continuousSet in dataset.getContinuousSets():
print(
"\t", continuousSet.getLocalId(),
continuousSet.getReferenceSet().getLocalId(),
continuousSet.getId(),
sep="\t")
print("\tPhenotypeAssociationSets:")
for phenotypeAssociationSet in \
dataset.getPhenotypeAssociationSets():
print(
"\t", phenotypeAssociationSet.getLocalId(),
phenotypeAssociationSet.getParentContainer().getId(),
sep="\t")
# TODO - please improve this listing
print("\tRnaQuantificationSets:")
for rna_quantification_set in dataset.getRnaQuantificationSets():
print(
"\t", rna_quantification_set.getLocalId(),
rna_quantification_set.getId(), sep="\t")
for quant in rna_quantification_set.getRnaQuantifications():
print(
"\t\t", quant.getLocalId(),
quant._description,
",".join(quant._readGroupIds),
",".join(quant._featureSetIds), sep="\t") | [
"def",
"printSummary",
"(",
"self",
")",
":",
"print",
"(",
"\"Ontologies:\"",
")",
"for",
"ontology",
"in",
"self",
".",
"getOntologys",
"(",
")",
":",
"print",
"(",
"\"\"",
",",
"ontology",
".",
"getOntologyPrefix",
"(",
")",
",",
"ontology",
".",
"getName",
"(",
")",
",",
"ontology",
".",
"getDataUrl",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"ReferenceSets:\"",
")",
"for",
"referenceSet",
"in",
"self",
".",
"getReferenceSets",
"(",
")",
":",
"print",
"(",
"\"\"",
",",
"referenceSet",
".",
"getLocalId",
"(",
")",
",",
"referenceSet",
".",
"getId",
"(",
")",
",",
"referenceSet",
".",
"getDescription",
"(",
")",
",",
"referenceSet",
".",
"getDataUrl",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"for",
"reference",
"in",
"referenceSet",
".",
"getReferences",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"reference",
".",
"getLocalId",
"(",
")",
",",
"reference",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"Datasets:\"",
")",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"print",
"(",
"\"\"",
",",
"dataset",
".",
"getLocalId",
"(",
")",
",",
"dataset",
".",
"getId",
"(",
")",
",",
"dataset",
".",
"getDescription",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"\\tReadGroupSets:\"",
")",
"for",
"readGroupSet",
"in",
"dataset",
".",
"getReadGroupSets",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"readGroupSet",
".",
"getLocalId",
"(",
")",
",",
"readGroupSet",
".",
"getReferenceSet",
"(",
")",
".",
"getLocalId",
"(",
")",
",",
"readGroupSet",
".",
"getId",
"(",
")",
",",
"readGroupSet",
".",
"getDataUrl",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"for",
"readGroup",
"in",
"readGroupSet",
".",
"getReadGroups",
"(",
")",
":",
"print",
"(",
"\"\\t\\t\"",
",",
"readGroup",
".",
"getId",
"(",
")",
",",
"readGroup",
".",
"getLocalId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"\\tVariantSets:\"",
")",
"for",
"variantSet",
"in",
"dataset",
".",
"getVariantSets",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"variantSet",
".",
"getLocalId",
"(",
")",
",",
"variantSet",
".",
"getReferenceSet",
"(",
")",
".",
"getLocalId",
"(",
")",
",",
"variantSet",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"if",
"variantSet",
".",
"getNumVariantAnnotationSets",
"(",
")",
">",
"0",
":",
"print",
"(",
"\"\\t\\tVariantAnnotationSets:\"",
")",
"for",
"vas",
"in",
"variantSet",
".",
"getVariantAnnotationSets",
"(",
")",
":",
"print",
"(",
"\"\\t\\t\"",
",",
"vas",
".",
"getLocalId",
"(",
")",
",",
"vas",
".",
"getAnnotationType",
"(",
")",
",",
"vas",
".",
"getOntology",
"(",
")",
".",
"getName",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"\\tFeatureSets:\"",
")",
"for",
"featureSet",
"in",
"dataset",
".",
"getFeatureSets",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"featureSet",
".",
"getLocalId",
"(",
")",
",",
"featureSet",
".",
"getReferenceSet",
"(",
")",
".",
"getLocalId",
"(",
")",
",",
"featureSet",
".",
"getOntology",
"(",
")",
".",
"getName",
"(",
")",
",",
"featureSet",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"\\tContinuousSets:\"",
")",
"for",
"continuousSet",
"in",
"dataset",
".",
"getContinuousSets",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"continuousSet",
".",
"getLocalId",
"(",
")",
",",
"continuousSet",
".",
"getReferenceSet",
"(",
")",
".",
"getLocalId",
"(",
")",
",",
"continuousSet",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"print",
"(",
"\"\\tPhenotypeAssociationSets:\"",
")",
"for",
"phenotypeAssociationSet",
"in",
"dataset",
".",
"getPhenotypeAssociationSets",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"phenotypeAssociationSet",
".",
"getLocalId",
"(",
")",
",",
"phenotypeAssociationSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"# TODO - please improve this listing",
"print",
"(",
"\"\\tRnaQuantificationSets:\"",
")",
"for",
"rna_quantification_set",
"in",
"dataset",
".",
"getRnaQuantificationSets",
"(",
")",
":",
"print",
"(",
"\"\\t\"",
",",
"rna_quantification_set",
".",
"getLocalId",
"(",
")",
",",
"rna_quantification_set",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")",
"for",
"quant",
"in",
"rna_quantification_set",
".",
"getRnaQuantifications",
"(",
")",
":",
"print",
"(",
"\"\\t\\t\"",
",",
"quant",
".",
"getLocalId",
"(",
")",
",",
"quant",
".",
"_description",
",",
"\",\"",
".",
"join",
"(",
"quant",
".",
"_readGroupIds",
")",
",",
"\",\"",
".",
"join",
"(",
"quant",
".",
"_featureSetIds",
")",
",",
"sep",
"=",
"\"\\t\"",
")"
] | Prints a summary of this data repository to stdout. | [
"Prints",
"a",
"summary",
"of",
"this",
"data",
"repository",
"to",
"stdout",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L215-L300 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.allReadGroups | def allReadGroups(self):
"""
Return an iterator over all read groups in the data repo
"""
for dataset in self.getDatasets():
for readGroupSet in dataset.getReadGroupSets():
for readGroup in readGroupSet.getReadGroups():
yield readGroup | python | def allReadGroups(self):
"""
Return an iterator over all read groups in the data repo
"""
for dataset in self.getDatasets():
for readGroupSet in dataset.getReadGroupSets():
for readGroup in readGroupSet.getReadGroups():
yield readGroup | [
"def",
"allReadGroups",
"(",
"self",
")",
":",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"for",
"readGroupSet",
"in",
"dataset",
".",
"getReadGroupSets",
"(",
")",
":",
"for",
"readGroup",
"in",
"readGroupSet",
".",
"getReadGroups",
"(",
")",
":",
"yield",
"readGroup"
] | Return an iterator over all read groups in the data repo | [
"Return",
"an",
"iterator",
"over",
"all",
"read",
"groups",
"in",
"the",
"data",
"repo"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L334-L341 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.allFeatures | def allFeatures(self):
"""
Return an iterator over all features in the data repo
"""
for dataset in self.getDatasets():
for featureSet in dataset.getFeatureSets():
for feature in featureSet.getFeatures():
yield feature | python | def allFeatures(self):
"""
Return an iterator over all features in the data repo
"""
for dataset in self.getDatasets():
for featureSet in dataset.getFeatureSets():
for feature in featureSet.getFeatures():
yield feature | [
"def",
"allFeatures",
"(",
"self",
")",
":",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"for",
"featureSet",
"in",
"dataset",
".",
"getFeatureSets",
"(",
")",
":",
"for",
"feature",
"in",
"featureSet",
".",
"getFeatures",
"(",
")",
":",
"yield",
"feature"
] | Return an iterator over all features in the data repo | [
"Return",
"an",
"iterator",
"over",
"all",
"features",
"in",
"the",
"data",
"repo"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L359-L366 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.allCallSets | def allCallSets(self):
"""
Return an iterator over all call sets in the data repo
"""
for dataset in self.getDatasets():
for variantSet in dataset.getVariantSets():
for callSet in variantSet.getCallSets():
yield callSet | python | def allCallSets(self):
"""
Return an iterator over all call sets in the data repo
"""
for dataset in self.getDatasets():
for variantSet in dataset.getVariantSets():
for callSet in variantSet.getCallSets():
yield callSet | [
"def",
"allCallSets",
"(",
"self",
")",
":",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"for",
"variantSet",
"in",
"dataset",
".",
"getVariantSets",
"(",
")",
":",
"for",
"callSet",
"in",
"variantSet",
".",
"getCallSets",
"(",
")",
":",
"yield",
"callSet"
] | Return an iterator over all call sets in the data repo | [
"Return",
"an",
"iterator",
"over",
"all",
"call",
"sets",
"in",
"the",
"data",
"repo"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L376-L383 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.allVariantAnnotationSets | def allVariantAnnotationSets(self):
"""
Return an iterator over all variant annotation sets
in the data repo
"""
for dataset in self.getDatasets():
for variantSet in dataset.getVariantSets():
for vaSet in variantSet.getVariantAnnotationSets():
yield vaSet | python | def allVariantAnnotationSets(self):
"""
Return an iterator over all variant annotation sets
in the data repo
"""
for dataset in self.getDatasets():
for variantSet in dataset.getVariantSets():
for vaSet in variantSet.getVariantAnnotationSets():
yield vaSet | [
"def",
"allVariantAnnotationSets",
"(",
"self",
")",
":",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"for",
"variantSet",
"in",
"dataset",
".",
"getVariantSets",
"(",
")",
":",
"for",
"vaSet",
"in",
"variantSet",
".",
"getVariantAnnotationSets",
"(",
")",
":",
"yield",
"vaSet"
] | Return an iterator over all variant annotation sets
in the data repo | [
"Return",
"an",
"iterator",
"over",
"all",
"variant",
"annotation",
"sets",
"in",
"the",
"data",
"repo"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L385-L393 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.allRnaQuantifications | def allRnaQuantifications(self):
"""
Return an iterator over all rna quantifications
"""
for dataset in self.getDatasets():
for rnaQuantificationSet in dataset.getRnaQuantificationSets():
for rnaQuantification in \
rnaQuantificationSet.getRnaQuantifications():
yield rnaQuantification | python | def allRnaQuantifications(self):
"""
Return an iterator over all rna quantifications
"""
for dataset in self.getDatasets():
for rnaQuantificationSet in dataset.getRnaQuantificationSets():
for rnaQuantification in \
rnaQuantificationSet.getRnaQuantifications():
yield rnaQuantification | [
"def",
"allRnaQuantifications",
"(",
"self",
")",
":",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"for",
"rnaQuantificationSet",
"in",
"dataset",
".",
"getRnaQuantificationSets",
"(",
")",
":",
"for",
"rnaQuantification",
"in",
"rnaQuantificationSet",
".",
"getRnaQuantifications",
"(",
")",
":",
"yield",
"rnaQuantification"
] | Return an iterator over all rna quantifications | [
"Return",
"an",
"iterator",
"over",
"all",
"rna",
"quantifications"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L412-L420 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | AbstractDataRepository.allExpressionLevels | def allExpressionLevels(self):
"""
Return an iterator over all expression levels
"""
for dataset in self.getDatasets():
for rnaQuantificationSet in dataset.getRnaQuantificationSets():
for rnaQuantification in \
rnaQuantificationSet.getRnaQuantifications():
for expressionLevel in \
rnaQuantification.getExpressionLevels():
yield expressionLevel | python | def allExpressionLevels(self):
"""
Return an iterator over all expression levels
"""
for dataset in self.getDatasets():
for rnaQuantificationSet in dataset.getRnaQuantificationSets():
for rnaQuantification in \
rnaQuantificationSet.getRnaQuantifications():
for expressionLevel in \
rnaQuantification.getExpressionLevels():
yield expressionLevel | [
"def",
"allExpressionLevels",
"(",
"self",
")",
":",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"for",
"rnaQuantificationSet",
"in",
"dataset",
".",
"getRnaQuantificationSets",
"(",
")",
":",
"for",
"rnaQuantification",
"in",
"rnaQuantificationSet",
".",
"getRnaQuantifications",
"(",
")",
":",
"for",
"expressionLevel",
"in",
"rnaQuantification",
".",
"getExpressionLevels",
"(",
")",
":",
"yield",
"expressionLevel"
] | Return an iterator over all expression levels | [
"Return",
"an",
"iterator",
"over",
"all",
"expression",
"levels"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L422-L432 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.getPeer | def getPeer(self, url):
"""
Finds a peer by URL and return the first peer record with that URL.
"""
peers = list(models.Peer.select().where(models.Peer.url == url))
if len(peers) == 0:
raise exceptions.PeerNotFoundException(url)
return peers[0] | python | def getPeer(self, url):
"""
Finds a peer by URL and return the first peer record with that URL.
"""
peers = list(models.Peer.select().where(models.Peer.url == url))
if len(peers) == 0:
raise exceptions.PeerNotFoundException(url)
return peers[0] | [
"def",
"getPeer",
"(",
"self",
",",
"url",
")",
":",
"peers",
"=",
"list",
"(",
"models",
".",
"Peer",
".",
"select",
"(",
")",
".",
"where",
"(",
"models",
".",
"Peer",
".",
"url",
"==",
"url",
")",
")",
"if",
"len",
"(",
"peers",
")",
"==",
"0",
":",
"raise",
"exceptions",
".",
"PeerNotFoundException",
"(",
"url",
")",
"return",
"peers",
"[",
"0",
"]"
] | Finds a peer by URL and return the first peer record with that URL. | [
"Finds",
"a",
"peer",
"by",
"URL",
"and",
"return",
"the",
"first",
"peer",
"record",
"with",
"that",
"URL",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L528-L535 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.getPeers | def getPeers(self, offset=0, limit=1000):
"""
Get the list of peers using an SQL offset and limit. Returns a list
of peer datamodel objects in a list.
"""
select = models.Peer.select().order_by(
models.Peer.url).limit(limit).offset(offset)
return [peers.Peer(p.url, record=p) for p in select] | python | def getPeers(self, offset=0, limit=1000):
"""
Get the list of peers using an SQL offset and limit. Returns a list
of peer datamodel objects in a list.
"""
select = models.Peer.select().order_by(
models.Peer.url).limit(limit).offset(offset)
return [peers.Peer(p.url, record=p) for p in select] | [
"def",
"getPeers",
"(",
"self",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"1000",
")",
":",
"select",
"=",
"models",
".",
"Peer",
".",
"select",
"(",
")",
".",
"order_by",
"(",
"models",
".",
"Peer",
".",
"url",
")",
".",
"limit",
"(",
"limit",
")",
".",
"offset",
"(",
"offset",
")",
"return",
"[",
"peers",
".",
"Peer",
"(",
"p",
".",
"url",
",",
"record",
"=",
"p",
")",
"for",
"p",
"in",
"select",
"]"
] | Get the list of peers using an SQL offset and limit. Returns a list
of peer datamodel objects in a list. | [
"Get",
"the",
"list",
"of",
"peers",
"using",
"an",
"SQL",
"offset",
"and",
"limit",
".",
"Returns",
"a",
"list",
"of",
"peer",
"datamodel",
"objects",
"in",
"a",
"list",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L537-L544 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.tableToTsv | def tableToTsv(self, model):
"""
Takes a model class and attempts to create a table in TSV format
that can be imported into a spreadsheet program.
"""
first = True
for item in model.select():
if first:
header = "".join(
["{}\t".format(x) for x in model._meta.fields.keys()])
print(header)
first = False
row = "".join(
["{}\t".format(
getattr(item, key)) for key in model._meta.fields.keys()])
print(row) | python | def tableToTsv(self, model):
"""
Takes a model class and attempts to create a table in TSV format
that can be imported into a spreadsheet program.
"""
first = True
for item in model.select():
if first:
header = "".join(
["{}\t".format(x) for x in model._meta.fields.keys()])
print(header)
first = False
row = "".join(
["{}\t".format(
getattr(item, key)) for key in model._meta.fields.keys()])
print(row) | [
"def",
"tableToTsv",
"(",
"self",
",",
"model",
")",
":",
"first",
"=",
"True",
"for",
"item",
"in",
"model",
".",
"select",
"(",
")",
":",
"if",
"first",
":",
"header",
"=",
"\"\"",
".",
"join",
"(",
"[",
"\"{}\\t\"",
".",
"format",
"(",
"x",
")",
"for",
"x",
"in",
"model",
".",
"_meta",
".",
"fields",
".",
"keys",
"(",
")",
"]",
")",
"print",
"(",
"header",
")",
"first",
"=",
"False",
"row",
"=",
"\"\"",
".",
"join",
"(",
"[",
"\"{}\\t\"",
".",
"format",
"(",
"getattr",
"(",
"item",
",",
"key",
")",
")",
"for",
"key",
"in",
"model",
".",
"_meta",
".",
"fields",
".",
"keys",
"(",
")",
"]",
")",
"print",
"(",
"row",
")"
] | Takes a model class and attempts to create a table in TSV format
that can be imported into a spreadsheet program. | [
"Takes",
"a",
"model",
"class",
"and",
"attempts",
"to",
"create",
"a",
"table",
"in",
"TSV",
"format",
"that",
"can",
"be",
"imported",
"into",
"a",
"spreadsheet",
"program",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L546-L561 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.clearAnnouncements | def clearAnnouncements(self):
"""
Flushes the announcement table.
"""
try:
q = models.Announcement.delete().where(
models.Announcement.id > 0)
q.execute()
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def clearAnnouncements(self):
"""
Flushes the announcement table.
"""
try:
q = models.Announcement.delete().where(
models.Announcement.id > 0)
q.execute()
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"clearAnnouncements",
"(",
"self",
")",
":",
"try",
":",
"q",
"=",
"models",
".",
"Announcement",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Announcement",
".",
"id",
">",
"0",
")",
"q",
".",
"execute",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Flushes the announcement table. | [
"Flushes",
"the",
"announcement",
"table",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L569-L578 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertAnnouncement | def insertAnnouncement(self, announcement):
"""
Adds an announcement to the registry for later analysis.
"""
url = announcement.get('url', None)
try:
peers.Peer(url)
except:
raise exceptions.BadUrlException(url)
try:
# TODO get more details about the user agent
models.Announcement.create(
url=announcement.get('url'),
attributes=json.dumps(announcement.get('attributes', {})),
remote_addr=announcement.get('remote_addr', None),
user_agent=announcement.get('user_agent', None))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertAnnouncement(self, announcement):
"""
Adds an announcement to the registry for later analysis.
"""
url = announcement.get('url', None)
try:
peers.Peer(url)
except:
raise exceptions.BadUrlException(url)
try:
# TODO get more details about the user agent
models.Announcement.create(
url=announcement.get('url'),
attributes=json.dumps(announcement.get('attributes', {})),
remote_addr=announcement.get('remote_addr', None),
user_agent=announcement.get('user_agent', None))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertAnnouncement",
"(",
"self",
",",
"announcement",
")",
":",
"url",
"=",
"announcement",
".",
"get",
"(",
"'url'",
",",
"None",
")",
"try",
":",
"peers",
".",
"Peer",
"(",
"url",
")",
"except",
":",
"raise",
"exceptions",
".",
"BadUrlException",
"(",
"url",
")",
"try",
":",
"# TODO get more details about the user agent",
"models",
".",
"Announcement",
".",
"create",
"(",
"url",
"=",
"announcement",
".",
"get",
"(",
"'url'",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"announcement",
".",
"get",
"(",
"'attributes'",
",",
"{",
"}",
")",
")",
",",
"remote_addr",
"=",
"announcement",
".",
"get",
"(",
"'remote_addr'",
",",
"None",
")",
",",
"user_agent",
"=",
"announcement",
".",
"get",
"(",
"'user_agent'",
",",
"None",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Adds an announcement to the registry for later analysis. | [
"Adds",
"an",
"announcement",
"to",
"the",
"registry",
"for",
"later",
"analysis",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L580-L597 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.open | def open(self, mode=MODE_READ):
"""
Opens this repo in the specified mode.
TODO: figure out the correct semantics of this and document
the intended future behaviour as well as the current
transitional behaviour.
"""
if mode not in [MODE_READ, MODE_WRITE]:
error = "Open mode must be '{}' or '{}'".format(
MODE_READ, MODE_WRITE)
raise ValueError(error)
self._openMode = mode
if mode == MODE_READ:
self.assertExists()
if mode == MODE_READ:
# This is part of the transitional behaviour where
# we load the whole DB into memory to get access to
# the data model.
self.load() | python | def open(self, mode=MODE_READ):
"""
Opens this repo in the specified mode.
TODO: figure out the correct semantics of this and document
the intended future behaviour as well as the current
transitional behaviour.
"""
if mode not in [MODE_READ, MODE_WRITE]:
error = "Open mode must be '{}' or '{}'".format(
MODE_READ, MODE_WRITE)
raise ValueError(error)
self._openMode = mode
if mode == MODE_READ:
self.assertExists()
if mode == MODE_READ:
# This is part of the transitional behaviour where
# we load the whole DB into memory to get access to
# the data model.
self.load() | [
"def",
"open",
"(",
"self",
",",
"mode",
"=",
"MODE_READ",
")",
":",
"if",
"mode",
"not",
"in",
"[",
"MODE_READ",
",",
"MODE_WRITE",
"]",
":",
"error",
"=",
"\"Open mode must be '{}' or '{}'\"",
".",
"format",
"(",
"MODE_READ",
",",
"MODE_WRITE",
")",
"raise",
"ValueError",
"(",
"error",
")",
"self",
".",
"_openMode",
"=",
"mode",
"if",
"mode",
"==",
"MODE_READ",
":",
"self",
".",
"assertExists",
"(",
")",
"if",
"mode",
"==",
"MODE_READ",
":",
"# This is part of the transitional behaviour where",
"# we load the whole DB into memory to get access to",
"# the data model.",
"self",
".",
"load",
"(",
")"
] | Opens this repo in the specified mode.
TODO: figure out the correct semantics of this and document
the intended future behaviour as well as the current
transitional behaviour. | [
"Opens",
"this",
"repo",
"in",
"the",
"specified",
"mode",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L599-L618 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.verify | def verify(self):
"""
Verifies that the data in the repository is consistent.
"""
# TODO this should emit to a log that we can configure so we can
# have verbosity levels. We should provide a way to configure
# where we look at various chromosomes and so on. This will be
# an important debug tool for administrators.
for ontology in self.getOntologys():
print(
"Verifying Ontology", ontology.getName(),
"@", ontology.getDataUrl())
# TODO how do we verify this? Check some well-know SO terms?
for referenceSet in self.getReferenceSets():
print(
"Verifying ReferenceSet", referenceSet.getLocalId(),
"@", referenceSet.getDataUrl())
for reference in referenceSet.getReferences():
length = min(reference.getLength(), 1000)
bases = reference.getBases(0, length)
assert len(bases) == length
print(
"\tReading", length, "bases from",
reference.getLocalId())
for dataset in self.getDatasets():
print("Verifying Dataset", dataset.getLocalId())
for featureSet in dataset.getFeatureSets():
for referenceSet in self.getReferenceSets():
# TODO cycle through references?
reference = referenceSet.getReferences()[0]
print(
"\tVerifying FeatureSet",
featureSet.getLocalId(),
"with reference", reference.getLocalId())
length = min(reference.getLength(), 1000)
features = featureSet.getFeatures(
reference.getLocalId(), 0, length, None, 3)
for feature in features:
print("\t{}".format(feature))
# for continuousSet in dataset.getContinuousSets():
# -- there is no getContinuous
for readGroupSet in dataset.getReadGroupSets():
print(
"\tVerifying ReadGroupSet", readGroupSet.getLocalId(),
"@", readGroupSet.getDataUrl())
references = readGroupSet.getReferenceSet().getReferences()
# TODO should we cycle through the references? Should probably
# be an option.
reference = references[0]
max_alignments = 10
for readGroup in readGroupSet.getReadGroups():
alignments = readGroup.getReadAlignments(reference)
for i, alignment in enumerate(alignments):
if i == max_alignments:
break
print(
"\t\tRead", i, "alignments from",
readGroup.getLocalId())
for variantSet in dataset.getVariantSets():
print("\tVerifying VariantSet", variantSet.getLocalId())
max_variants = 10
max_annotations = 10
refMap = variantSet.getReferenceToDataUrlIndexMap()
for referenceName, (dataUrl, indexFile) in refMap.items():
variants = variantSet.getVariants(referenceName, 0, 2**31)
for i, variant in enumerate(variants):
if i == max_variants:
break
print(
"\t\tRead", i, "variants from reference",
referenceName, "@", dataUrl)
for annotationSet in variantSet.getVariantAnnotationSets():
print(
"\t\tVerifying VariantAnnotationSet",
annotationSet.getLocalId())
for referenceName in refMap.keys():
annotations = annotationSet.getVariantAnnotations(
referenceName, 0, 2**31)
for i, annotation in enumerate(annotations):
if i == max_annotations:
break
print(
"\t\t\tRead", i, "annotations from reference",
referenceName)
for phenotypeAssociationSet \
in dataset.getPhenotypeAssociationSets():
print("\t\tVerifying PhenotypeAssociationSet")
print(
"\t\t\t", phenotypeAssociationSet.getLocalId(),
phenotypeAssociationSet.getParentContainer().getId(),
sep="\t") | python | def verify(self):
"""
Verifies that the data in the repository is consistent.
"""
# TODO this should emit to a log that we can configure so we can
# have verbosity levels. We should provide a way to configure
# where we look at various chromosomes and so on. This will be
# an important debug tool for administrators.
for ontology in self.getOntologys():
print(
"Verifying Ontology", ontology.getName(),
"@", ontology.getDataUrl())
# TODO how do we verify this? Check some well-know SO terms?
for referenceSet in self.getReferenceSets():
print(
"Verifying ReferenceSet", referenceSet.getLocalId(),
"@", referenceSet.getDataUrl())
for reference in referenceSet.getReferences():
length = min(reference.getLength(), 1000)
bases = reference.getBases(0, length)
assert len(bases) == length
print(
"\tReading", length, "bases from",
reference.getLocalId())
for dataset in self.getDatasets():
print("Verifying Dataset", dataset.getLocalId())
for featureSet in dataset.getFeatureSets():
for referenceSet in self.getReferenceSets():
# TODO cycle through references?
reference = referenceSet.getReferences()[0]
print(
"\tVerifying FeatureSet",
featureSet.getLocalId(),
"with reference", reference.getLocalId())
length = min(reference.getLength(), 1000)
features = featureSet.getFeatures(
reference.getLocalId(), 0, length, None, 3)
for feature in features:
print("\t{}".format(feature))
# for continuousSet in dataset.getContinuousSets():
# -- there is no getContinuous
for readGroupSet in dataset.getReadGroupSets():
print(
"\tVerifying ReadGroupSet", readGroupSet.getLocalId(),
"@", readGroupSet.getDataUrl())
references = readGroupSet.getReferenceSet().getReferences()
# TODO should we cycle through the references? Should probably
# be an option.
reference = references[0]
max_alignments = 10
for readGroup in readGroupSet.getReadGroups():
alignments = readGroup.getReadAlignments(reference)
for i, alignment in enumerate(alignments):
if i == max_alignments:
break
print(
"\t\tRead", i, "alignments from",
readGroup.getLocalId())
for variantSet in dataset.getVariantSets():
print("\tVerifying VariantSet", variantSet.getLocalId())
max_variants = 10
max_annotations = 10
refMap = variantSet.getReferenceToDataUrlIndexMap()
for referenceName, (dataUrl, indexFile) in refMap.items():
variants = variantSet.getVariants(referenceName, 0, 2**31)
for i, variant in enumerate(variants):
if i == max_variants:
break
print(
"\t\tRead", i, "variants from reference",
referenceName, "@", dataUrl)
for annotationSet in variantSet.getVariantAnnotationSets():
print(
"\t\tVerifying VariantAnnotationSet",
annotationSet.getLocalId())
for referenceName in refMap.keys():
annotations = annotationSet.getVariantAnnotations(
referenceName, 0, 2**31)
for i, annotation in enumerate(annotations):
if i == max_annotations:
break
print(
"\t\t\tRead", i, "annotations from reference",
referenceName)
for phenotypeAssociationSet \
in dataset.getPhenotypeAssociationSets():
print("\t\tVerifying PhenotypeAssociationSet")
print(
"\t\t\t", phenotypeAssociationSet.getLocalId(),
phenotypeAssociationSet.getParentContainer().getId(),
sep="\t") | [
"def",
"verify",
"(",
"self",
")",
":",
"# TODO this should emit to a log that we can configure so we can",
"# have verbosity levels. We should provide a way to configure",
"# where we look at various chromosomes and so on. This will be",
"# an important debug tool for administrators.",
"for",
"ontology",
"in",
"self",
".",
"getOntologys",
"(",
")",
":",
"print",
"(",
"\"Verifying Ontology\"",
",",
"ontology",
".",
"getName",
"(",
")",
",",
"\"@\"",
",",
"ontology",
".",
"getDataUrl",
"(",
")",
")",
"# TODO how do we verify this? Check some well-know SO terms?",
"for",
"referenceSet",
"in",
"self",
".",
"getReferenceSets",
"(",
")",
":",
"print",
"(",
"\"Verifying ReferenceSet\"",
",",
"referenceSet",
".",
"getLocalId",
"(",
")",
",",
"\"@\"",
",",
"referenceSet",
".",
"getDataUrl",
"(",
")",
")",
"for",
"reference",
"in",
"referenceSet",
".",
"getReferences",
"(",
")",
":",
"length",
"=",
"min",
"(",
"reference",
".",
"getLength",
"(",
")",
",",
"1000",
")",
"bases",
"=",
"reference",
".",
"getBases",
"(",
"0",
",",
"length",
")",
"assert",
"len",
"(",
"bases",
")",
"==",
"length",
"print",
"(",
"\"\\tReading\"",
",",
"length",
",",
"\"bases from\"",
",",
"reference",
".",
"getLocalId",
"(",
")",
")",
"for",
"dataset",
"in",
"self",
".",
"getDatasets",
"(",
")",
":",
"print",
"(",
"\"Verifying Dataset\"",
",",
"dataset",
".",
"getLocalId",
"(",
")",
")",
"for",
"featureSet",
"in",
"dataset",
".",
"getFeatureSets",
"(",
")",
":",
"for",
"referenceSet",
"in",
"self",
".",
"getReferenceSets",
"(",
")",
":",
"# TODO cycle through references?",
"reference",
"=",
"referenceSet",
".",
"getReferences",
"(",
")",
"[",
"0",
"]",
"print",
"(",
"\"\\tVerifying FeatureSet\"",
",",
"featureSet",
".",
"getLocalId",
"(",
")",
",",
"\"with reference\"",
",",
"reference",
".",
"getLocalId",
"(",
")",
")",
"length",
"=",
"min",
"(",
"reference",
".",
"getLength",
"(",
")",
",",
"1000",
")",
"features",
"=",
"featureSet",
".",
"getFeatures",
"(",
"reference",
".",
"getLocalId",
"(",
")",
",",
"0",
",",
"length",
",",
"None",
",",
"3",
")",
"for",
"feature",
"in",
"features",
":",
"print",
"(",
"\"\\t{}\"",
".",
"format",
"(",
"feature",
")",
")",
"# for continuousSet in dataset.getContinuousSets():",
"# -- there is no getContinuous",
"for",
"readGroupSet",
"in",
"dataset",
".",
"getReadGroupSets",
"(",
")",
":",
"print",
"(",
"\"\\tVerifying ReadGroupSet\"",
",",
"readGroupSet",
".",
"getLocalId",
"(",
")",
",",
"\"@\"",
",",
"readGroupSet",
".",
"getDataUrl",
"(",
")",
")",
"references",
"=",
"readGroupSet",
".",
"getReferenceSet",
"(",
")",
".",
"getReferences",
"(",
")",
"# TODO should we cycle through the references? Should probably",
"# be an option.",
"reference",
"=",
"references",
"[",
"0",
"]",
"max_alignments",
"=",
"10",
"for",
"readGroup",
"in",
"readGroupSet",
".",
"getReadGroups",
"(",
")",
":",
"alignments",
"=",
"readGroup",
".",
"getReadAlignments",
"(",
"reference",
")",
"for",
"i",
",",
"alignment",
"in",
"enumerate",
"(",
"alignments",
")",
":",
"if",
"i",
"==",
"max_alignments",
":",
"break",
"print",
"(",
"\"\\t\\tRead\"",
",",
"i",
",",
"\"alignments from\"",
",",
"readGroup",
".",
"getLocalId",
"(",
")",
")",
"for",
"variantSet",
"in",
"dataset",
".",
"getVariantSets",
"(",
")",
":",
"print",
"(",
"\"\\tVerifying VariantSet\"",
",",
"variantSet",
".",
"getLocalId",
"(",
")",
")",
"max_variants",
"=",
"10",
"max_annotations",
"=",
"10",
"refMap",
"=",
"variantSet",
".",
"getReferenceToDataUrlIndexMap",
"(",
")",
"for",
"referenceName",
",",
"(",
"dataUrl",
",",
"indexFile",
")",
"in",
"refMap",
".",
"items",
"(",
")",
":",
"variants",
"=",
"variantSet",
".",
"getVariants",
"(",
"referenceName",
",",
"0",
",",
"2",
"**",
"31",
")",
"for",
"i",
",",
"variant",
"in",
"enumerate",
"(",
"variants",
")",
":",
"if",
"i",
"==",
"max_variants",
":",
"break",
"print",
"(",
"\"\\t\\tRead\"",
",",
"i",
",",
"\"variants from reference\"",
",",
"referenceName",
",",
"\"@\"",
",",
"dataUrl",
")",
"for",
"annotationSet",
"in",
"variantSet",
".",
"getVariantAnnotationSets",
"(",
")",
":",
"print",
"(",
"\"\\t\\tVerifying VariantAnnotationSet\"",
",",
"annotationSet",
".",
"getLocalId",
"(",
")",
")",
"for",
"referenceName",
"in",
"refMap",
".",
"keys",
"(",
")",
":",
"annotations",
"=",
"annotationSet",
".",
"getVariantAnnotations",
"(",
"referenceName",
",",
"0",
",",
"2",
"**",
"31",
")",
"for",
"i",
",",
"annotation",
"in",
"enumerate",
"(",
"annotations",
")",
":",
"if",
"i",
"==",
"max_annotations",
":",
"break",
"print",
"(",
"\"\\t\\t\\tRead\"",
",",
"i",
",",
"\"annotations from reference\"",
",",
"referenceName",
")",
"for",
"phenotypeAssociationSet",
"in",
"dataset",
".",
"getPhenotypeAssociationSets",
"(",
")",
":",
"print",
"(",
"\"\\t\\tVerifying PhenotypeAssociationSet\"",
")",
"print",
"(",
"\"\\t\\t\\t\"",
",",
"phenotypeAssociationSet",
".",
"getLocalId",
"(",
")",
",",
"phenotypeAssociationSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"sep",
"=",
"\"\\t\"",
")"
] | Verifies that the data in the repository is consistent. | [
"Verifies",
"that",
"the",
"data",
"in",
"the",
"repository",
"is",
"consistent",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L635-L725 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertOntology | def insertOntology(self, ontology):
"""
Inserts the specified ontology into this repository.
"""
try:
models.Ontology.create(
id=ontology.getName(),
name=ontology.getName(),
dataurl=ontology.getDataUrl(),
ontologyprefix=ontology.getOntologyPrefix())
except Exception:
raise exceptions.DuplicateNameException(
ontology.getName()) | python | def insertOntology(self, ontology):
"""
Inserts the specified ontology into this repository.
"""
try:
models.Ontology.create(
id=ontology.getName(),
name=ontology.getName(),
dataurl=ontology.getDataUrl(),
ontologyprefix=ontology.getOntologyPrefix())
except Exception:
raise exceptions.DuplicateNameException(
ontology.getName()) | [
"def",
"insertOntology",
"(",
"self",
",",
"ontology",
")",
":",
"try",
":",
"models",
".",
"Ontology",
".",
"create",
"(",
"id",
"=",
"ontology",
".",
"getName",
"(",
")",
",",
"name",
"=",
"ontology",
".",
"getName",
"(",
")",
",",
"dataurl",
"=",
"ontology",
".",
"getDataUrl",
"(",
")",
",",
"ontologyprefix",
"=",
"ontology",
".",
"getOntologyPrefix",
"(",
")",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"ontology",
".",
"getName",
"(",
")",
")"
] | Inserts the specified ontology into this repository. | [
"Inserts",
"the",
"specified",
"ontology",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L753-L765 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeOntology | def removeOntology(self, ontology):
"""
Removes the specified ontology term map from this repository.
"""
q = models.Ontology.delete().where(id == ontology.getId())
q.execute() | python | def removeOntology(self, ontology):
"""
Removes the specified ontology term map from this repository.
"""
q = models.Ontology.delete().where(id == ontology.getId())
q.execute() | [
"def",
"removeOntology",
"(",
"self",
",",
"ontology",
")",
":",
"q",
"=",
"models",
".",
"Ontology",
".",
"delete",
"(",
")",
".",
"where",
"(",
"id",
"==",
"ontology",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Removes the specified ontology term map from this repository. | [
"Removes",
"the",
"specified",
"ontology",
"term",
"map",
"from",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L775-L780 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertReference | def insertReference(self, reference):
"""
Inserts the specified reference into this repository.
"""
models.Reference.create(
id=reference.getId(),
referencesetid=reference.getParentContainer().getId(),
name=reference.getLocalId(),
length=reference.getLength(),
isderived=reference.getIsDerived(),
species=json.dumps(reference.getSpecies()),
md5checksum=reference.getMd5Checksum(),
sourceaccessions=json.dumps(reference.getSourceAccessions()),
sourceuri=reference.getSourceUri()) | python | def insertReference(self, reference):
"""
Inserts the specified reference into this repository.
"""
models.Reference.create(
id=reference.getId(),
referencesetid=reference.getParentContainer().getId(),
name=reference.getLocalId(),
length=reference.getLength(),
isderived=reference.getIsDerived(),
species=json.dumps(reference.getSpecies()),
md5checksum=reference.getMd5Checksum(),
sourceaccessions=json.dumps(reference.getSourceAccessions()),
sourceuri=reference.getSourceUri()) | [
"def",
"insertReference",
"(",
"self",
",",
"reference",
")",
":",
"models",
".",
"Reference",
".",
"create",
"(",
"id",
"=",
"reference",
".",
"getId",
"(",
")",
",",
"referencesetid",
"=",
"reference",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"reference",
".",
"getLocalId",
"(",
")",
",",
"length",
"=",
"reference",
".",
"getLength",
"(",
")",
",",
"isderived",
"=",
"reference",
".",
"getIsDerived",
"(",
")",
",",
"species",
"=",
"json",
".",
"dumps",
"(",
"reference",
".",
"getSpecies",
"(",
")",
")",
",",
"md5checksum",
"=",
"reference",
".",
"getMd5Checksum",
"(",
")",
",",
"sourceaccessions",
"=",
"json",
".",
"dumps",
"(",
"reference",
".",
"getSourceAccessions",
"(",
")",
")",
",",
"sourceuri",
"=",
"reference",
".",
"getSourceUri",
"(",
")",
")"
] | Inserts the specified reference into this repository. | [
"Inserts",
"the",
"specified",
"reference",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L785-L798 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertReferenceSet | def insertReferenceSet(self, referenceSet):
"""
Inserts the specified referenceSet into this repository.
"""
try:
models.Referenceset.create(
id=referenceSet.getId(),
name=referenceSet.getLocalId(),
description=referenceSet.getDescription(),
assemblyid=referenceSet.getAssemblyId(),
isderived=referenceSet.getIsDerived(),
species=json.dumps(referenceSet.getSpecies()),
md5checksum=referenceSet.getMd5Checksum(),
sourceaccessions=json.dumps(
referenceSet.getSourceAccessions()),
sourceuri=referenceSet.getSourceUri(),
dataurl=referenceSet.getDataUrl())
for reference in referenceSet.getReferences():
self.insertReference(reference)
except Exception:
raise exceptions.DuplicateNameException(
referenceSet.getLocalId()) | python | def insertReferenceSet(self, referenceSet):
"""
Inserts the specified referenceSet into this repository.
"""
try:
models.Referenceset.create(
id=referenceSet.getId(),
name=referenceSet.getLocalId(),
description=referenceSet.getDescription(),
assemblyid=referenceSet.getAssemblyId(),
isderived=referenceSet.getIsDerived(),
species=json.dumps(referenceSet.getSpecies()),
md5checksum=referenceSet.getMd5Checksum(),
sourceaccessions=json.dumps(
referenceSet.getSourceAccessions()),
sourceuri=referenceSet.getSourceUri(),
dataurl=referenceSet.getDataUrl())
for reference in referenceSet.getReferences():
self.insertReference(reference)
except Exception:
raise exceptions.DuplicateNameException(
referenceSet.getLocalId()) | [
"def",
"insertReferenceSet",
"(",
"self",
",",
"referenceSet",
")",
":",
"try",
":",
"models",
".",
"Referenceset",
".",
"create",
"(",
"id",
"=",
"referenceSet",
".",
"getId",
"(",
")",
",",
"name",
"=",
"referenceSet",
".",
"getLocalId",
"(",
")",
",",
"description",
"=",
"referenceSet",
".",
"getDescription",
"(",
")",
",",
"assemblyid",
"=",
"referenceSet",
".",
"getAssemblyId",
"(",
")",
",",
"isderived",
"=",
"referenceSet",
".",
"getIsDerived",
"(",
")",
",",
"species",
"=",
"json",
".",
"dumps",
"(",
"referenceSet",
".",
"getSpecies",
"(",
")",
")",
",",
"md5checksum",
"=",
"referenceSet",
".",
"getMd5Checksum",
"(",
")",
",",
"sourceaccessions",
"=",
"json",
".",
"dumps",
"(",
"referenceSet",
".",
"getSourceAccessions",
"(",
")",
")",
",",
"sourceuri",
"=",
"referenceSet",
".",
"getSourceUri",
"(",
")",
",",
"dataurl",
"=",
"referenceSet",
".",
"getDataUrl",
"(",
")",
")",
"for",
"reference",
"in",
"referenceSet",
".",
"getReferences",
"(",
")",
":",
"self",
".",
"insertReference",
"(",
"reference",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"referenceSet",
".",
"getLocalId",
"(",
")",
")"
] | Inserts the specified referenceSet into this repository. | [
"Inserts",
"the",
"specified",
"referenceSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L813-L834 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertDataset | def insertDataset(self, dataset):
"""
Inserts the specified dataset into this repository.
"""
try:
models.Dataset.create(
id=dataset.getId(),
name=dataset.getLocalId(),
description=dataset.getDescription(),
attributes=json.dumps(dataset.getAttributes()))
except Exception:
raise exceptions.DuplicateNameException(
dataset.getLocalId()) | python | def insertDataset(self, dataset):
"""
Inserts the specified dataset into this repository.
"""
try:
models.Dataset.create(
id=dataset.getId(),
name=dataset.getLocalId(),
description=dataset.getDescription(),
attributes=json.dumps(dataset.getAttributes()))
except Exception:
raise exceptions.DuplicateNameException(
dataset.getLocalId()) | [
"def",
"insertDataset",
"(",
"self",
",",
"dataset",
")",
":",
"try",
":",
"models",
".",
"Dataset",
".",
"create",
"(",
"id",
"=",
"dataset",
".",
"getId",
"(",
")",
",",
"name",
"=",
"dataset",
".",
"getLocalId",
"(",
")",
",",
"description",
"=",
"dataset",
".",
"getDescription",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"dataset",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"dataset",
".",
"getLocalId",
"(",
")",
")"
] | Inserts the specified dataset into this repository. | [
"Inserts",
"the",
"specified",
"dataset",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L848-L860 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeDataset | def removeDataset(self, dataset):
"""
Removes the specified dataset from this repository. This performs
a cascading removal of all items within this dataset.
"""
for datasetRecord in models.Dataset.select().where(
models.Dataset.id == dataset.getId()):
datasetRecord.delete_instance(recursive=True) | python | def removeDataset(self, dataset):
"""
Removes the specified dataset from this repository. This performs
a cascading removal of all items within this dataset.
"""
for datasetRecord in models.Dataset.select().where(
models.Dataset.id == dataset.getId()):
datasetRecord.delete_instance(recursive=True) | [
"def",
"removeDataset",
"(",
"self",
",",
"dataset",
")",
":",
"for",
"datasetRecord",
"in",
"models",
".",
"Dataset",
".",
"select",
"(",
")",
".",
"where",
"(",
"models",
".",
"Dataset",
".",
"id",
"==",
"dataset",
".",
"getId",
"(",
")",
")",
":",
"datasetRecord",
".",
"delete_instance",
"(",
"recursive",
"=",
"True",
")"
] | Removes the specified dataset from this repository. This performs
a cascading removal of all items within this dataset. | [
"Removes",
"the",
"specified",
"dataset",
"from",
"this",
"repository",
".",
"This",
"performs",
"a",
"cascading",
"removal",
"of",
"all",
"items",
"within",
"this",
"dataset",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L862-L869 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removePhenotypeAssociationSet | def removePhenotypeAssociationSet(self, phenotypeAssociationSet):
"""
Remove a phenotype association set from the repo
"""
q = models.Phenotypeassociationset.delete().where(
models.Phenotypeassociationset.id ==
phenotypeAssociationSet.getId())
q.execute() | python | def removePhenotypeAssociationSet(self, phenotypeAssociationSet):
"""
Remove a phenotype association set from the repo
"""
q = models.Phenotypeassociationset.delete().where(
models.Phenotypeassociationset.id ==
phenotypeAssociationSet.getId())
q.execute() | [
"def",
"removePhenotypeAssociationSet",
"(",
"self",
",",
"phenotypeAssociationSet",
")",
":",
"q",
"=",
"models",
".",
"Phenotypeassociationset",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Phenotypeassociationset",
".",
"id",
"==",
"phenotypeAssociationSet",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Remove a phenotype association set from the repo | [
"Remove",
"a",
"phenotype",
"association",
"set",
"from",
"the",
"repo"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L871-L878 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeFeatureSet | def removeFeatureSet(self, featureSet):
"""
Removes the specified featureSet from this repository.
"""
q = models.Featureset.delete().where(
models.Featureset.id == featureSet.getId())
q.execute() | python | def removeFeatureSet(self, featureSet):
"""
Removes the specified featureSet from this repository.
"""
q = models.Featureset.delete().where(
models.Featureset.id == featureSet.getId())
q.execute() | [
"def",
"removeFeatureSet",
"(",
"self",
",",
"featureSet",
")",
":",
"q",
"=",
"models",
".",
"Featureset",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Featureset",
".",
"id",
"==",
"featureSet",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Removes the specified featureSet from this repository. | [
"Removes",
"the",
"specified",
"featureSet",
"from",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L880-L886 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeContinuousSet | def removeContinuousSet(self, continuousSet):
"""
Removes the specified continuousSet from this repository.
"""
q = models.ContinuousSet.delete().where(
models.ContinuousSet.id == continuousSet.getId())
q.execute() | python | def removeContinuousSet(self, continuousSet):
"""
Removes the specified continuousSet from this repository.
"""
q = models.ContinuousSet.delete().where(
models.ContinuousSet.id == continuousSet.getId())
q.execute() | [
"def",
"removeContinuousSet",
"(",
"self",
",",
"continuousSet",
")",
":",
"q",
"=",
"models",
".",
"ContinuousSet",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"ContinuousSet",
".",
"id",
"==",
"continuousSet",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Removes the specified continuousSet from this repository. | [
"Removes",
"the",
"specified",
"continuousSet",
"from",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L888-L894 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertReadGroup | def insertReadGroup(self, readGroup):
"""
Inserts the specified readGroup into the DB.
"""
statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats()))
experimentJson = json.dumps(
protocol.toJsonDict(readGroup.getExperiment()))
try:
models.Readgroup.create(
id=readGroup.getId(),
readgroupsetid=readGroup.getParentContainer().getId(),
name=readGroup.getLocalId(),
predictedinsertedsize=readGroup.getPredictedInsertSize(),
samplename=readGroup.getSampleName(),
description=readGroup.getDescription(),
stats=statsJson,
experiment=experimentJson,
biosampleid=readGroup.getBiosampleId(),
attributes=json.dumps(readGroup.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertReadGroup(self, readGroup):
"""
Inserts the specified readGroup into the DB.
"""
statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats()))
experimentJson = json.dumps(
protocol.toJsonDict(readGroup.getExperiment()))
try:
models.Readgroup.create(
id=readGroup.getId(),
readgroupsetid=readGroup.getParentContainer().getId(),
name=readGroup.getLocalId(),
predictedinsertedsize=readGroup.getPredictedInsertSize(),
samplename=readGroup.getSampleName(),
description=readGroup.getDescription(),
stats=statsJson,
experiment=experimentJson,
biosampleid=readGroup.getBiosampleId(),
attributes=json.dumps(readGroup.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertReadGroup",
"(",
"self",
",",
"readGroup",
")",
":",
"statsJson",
"=",
"json",
".",
"dumps",
"(",
"protocol",
".",
"toJsonDict",
"(",
"readGroup",
".",
"getStats",
"(",
")",
")",
")",
"experimentJson",
"=",
"json",
".",
"dumps",
"(",
"protocol",
".",
"toJsonDict",
"(",
"readGroup",
".",
"getExperiment",
"(",
")",
")",
")",
"try",
":",
"models",
".",
"Readgroup",
".",
"create",
"(",
"id",
"=",
"readGroup",
".",
"getId",
"(",
")",
",",
"readgroupsetid",
"=",
"readGroup",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"readGroup",
".",
"getLocalId",
"(",
")",
",",
"predictedinsertedsize",
"=",
"readGroup",
".",
"getPredictedInsertSize",
"(",
")",
",",
"samplename",
"=",
"readGroup",
".",
"getSampleName",
"(",
")",
",",
"description",
"=",
"readGroup",
".",
"getDescription",
"(",
")",
",",
"stats",
"=",
"statsJson",
",",
"experiment",
"=",
"experimentJson",
",",
"biosampleid",
"=",
"readGroup",
".",
"getBiosampleId",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"readGroup",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Inserts the specified readGroup into the DB. | [
"Inserts",
"the",
"specified",
"readGroup",
"into",
"the",
"DB",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L907-L927 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeReadGroupSet | def removeReadGroupSet(self, readGroupSet):
"""
Removes the specified readGroupSet from this repository. This performs
a cascading removal of all items within this readGroupSet.
"""
for readGroupSetRecord in models.Readgroupset.select().where(
models.Readgroupset.id == readGroupSet.getId()):
readGroupSetRecord.delete_instance(recursive=True) | python | def removeReadGroupSet(self, readGroupSet):
"""
Removes the specified readGroupSet from this repository. This performs
a cascading removal of all items within this readGroupSet.
"""
for readGroupSetRecord in models.Readgroupset.select().where(
models.Readgroupset.id == readGroupSet.getId()):
readGroupSetRecord.delete_instance(recursive=True) | [
"def",
"removeReadGroupSet",
"(",
"self",
",",
"readGroupSet",
")",
":",
"for",
"readGroupSetRecord",
"in",
"models",
".",
"Readgroupset",
".",
"select",
"(",
")",
".",
"where",
"(",
"models",
".",
"Readgroupset",
".",
"id",
"==",
"readGroupSet",
".",
"getId",
"(",
")",
")",
":",
"readGroupSetRecord",
".",
"delete_instance",
"(",
"recursive",
"=",
"True",
")"
] | Removes the specified readGroupSet from this repository. This performs
a cascading removal of all items within this readGroupSet. | [
"Removes",
"the",
"specified",
"readGroupSet",
"from",
"this",
"repository",
".",
"This",
"performs",
"a",
"cascading",
"removal",
"of",
"all",
"items",
"within",
"this",
"readGroupSet",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L929-L936 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeVariantSet | def removeVariantSet(self, variantSet):
"""
Removes the specified variantSet from this repository. This performs
a cascading removal of all items within this variantSet.
"""
for variantSetRecord in models.Variantset.select().where(
models.Variantset.id == variantSet.getId()):
variantSetRecord.delete_instance(recursive=True) | python | def removeVariantSet(self, variantSet):
"""
Removes the specified variantSet from this repository. This performs
a cascading removal of all items within this variantSet.
"""
for variantSetRecord in models.Variantset.select().where(
models.Variantset.id == variantSet.getId()):
variantSetRecord.delete_instance(recursive=True) | [
"def",
"removeVariantSet",
"(",
"self",
",",
"variantSet",
")",
":",
"for",
"variantSetRecord",
"in",
"models",
".",
"Variantset",
".",
"select",
"(",
")",
".",
"where",
"(",
"models",
".",
"Variantset",
".",
"id",
"==",
"variantSet",
".",
"getId",
"(",
")",
")",
":",
"variantSetRecord",
".",
"delete_instance",
"(",
"recursive",
"=",
"True",
")"
] | Removes the specified variantSet from this repository. This performs
a cascading removal of all items within this variantSet. | [
"Removes",
"the",
"specified",
"variantSet",
"from",
"this",
"repository",
".",
"This",
"performs",
"a",
"cascading",
"removal",
"of",
"all",
"items",
"within",
"this",
"variantSet",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L938-L945 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeBiosample | def removeBiosample(self, biosample):
"""
Removes the specified biosample from this repository.
"""
q = models.Biosample.delete().where(
models.Biosample.id == biosample.getId())
q.execute() | python | def removeBiosample(self, biosample):
"""
Removes the specified biosample from this repository.
"""
q = models.Biosample.delete().where(
models.Biosample.id == biosample.getId())
q.execute() | [
"def",
"removeBiosample",
"(",
"self",
",",
"biosample",
")",
":",
"q",
"=",
"models",
".",
"Biosample",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Biosample",
".",
"id",
"==",
"biosample",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Removes the specified biosample from this repository. | [
"Removes",
"the",
"specified",
"biosample",
"from",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L947-L953 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeIndividual | def removeIndividual(self, individual):
"""
Removes the specified individual from this repository.
"""
q = models.Individual.delete().where(
models.Individual.id == individual.getId())
q.execute() | python | def removeIndividual(self, individual):
"""
Removes the specified individual from this repository.
"""
q = models.Individual.delete().where(
models.Individual.id == individual.getId())
q.execute() | [
"def",
"removeIndividual",
"(",
"self",
",",
"individual",
")",
":",
"q",
"=",
"models",
".",
"Individual",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Individual",
".",
"id",
"==",
"individual",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Removes the specified individual from this repository. | [
"Removes",
"the",
"specified",
"individual",
"from",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L955-L961 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertReadGroupSet | def insertReadGroupSet(self, readGroupSet):
"""
Inserts a the specified readGroupSet into this repository.
"""
programsJson = json.dumps(
[protocol.toJsonDict(program) for program in
readGroupSet.getPrograms()])
statsJson = json.dumps(protocol.toJsonDict(readGroupSet.getStats()))
try:
models.Readgroupset.create(
id=readGroupSet.getId(),
datasetid=readGroupSet.getParentContainer().getId(),
referencesetid=readGroupSet.getReferenceSet().getId(),
name=readGroupSet.getLocalId(),
programs=programsJson,
stats=statsJson,
dataurl=readGroupSet.getDataUrl(),
indexfile=readGroupSet.getIndexFile(),
attributes=json.dumps(readGroupSet.getAttributes()))
for readGroup in readGroupSet.getReadGroups():
self.insertReadGroup(readGroup)
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertReadGroupSet(self, readGroupSet):
"""
Inserts a the specified readGroupSet into this repository.
"""
programsJson = json.dumps(
[protocol.toJsonDict(program) for program in
readGroupSet.getPrograms()])
statsJson = json.dumps(protocol.toJsonDict(readGroupSet.getStats()))
try:
models.Readgroupset.create(
id=readGroupSet.getId(),
datasetid=readGroupSet.getParentContainer().getId(),
referencesetid=readGroupSet.getReferenceSet().getId(),
name=readGroupSet.getLocalId(),
programs=programsJson,
stats=statsJson,
dataurl=readGroupSet.getDataUrl(),
indexfile=readGroupSet.getIndexFile(),
attributes=json.dumps(readGroupSet.getAttributes()))
for readGroup in readGroupSet.getReadGroups():
self.insertReadGroup(readGroup)
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertReadGroupSet",
"(",
"self",
",",
"readGroupSet",
")",
":",
"programsJson",
"=",
"json",
".",
"dumps",
"(",
"[",
"protocol",
".",
"toJsonDict",
"(",
"program",
")",
"for",
"program",
"in",
"readGroupSet",
".",
"getPrograms",
"(",
")",
"]",
")",
"statsJson",
"=",
"json",
".",
"dumps",
"(",
"protocol",
".",
"toJsonDict",
"(",
"readGroupSet",
".",
"getStats",
"(",
")",
")",
")",
"try",
":",
"models",
".",
"Readgroupset",
".",
"create",
"(",
"id",
"=",
"readGroupSet",
".",
"getId",
"(",
")",
",",
"datasetid",
"=",
"readGroupSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"referencesetid",
"=",
"readGroupSet",
".",
"getReferenceSet",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"readGroupSet",
".",
"getLocalId",
"(",
")",
",",
"programs",
"=",
"programsJson",
",",
"stats",
"=",
"statsJson",
",",
"dataurl",
"=",
"readGroupSet",
".",
"getDataUrl",
"(",
")",
",",
"indexfile",
"=",
"readGroupSet",
".",
"getIndexFile",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"readGroupSet",
".",
"getAttributes",
"(",
")",
")",
")",
"for",
"readGroup",
"in",
"readGroupSet",
".",
"getReadGroups",
"(",
")",
":",
"self",
".",
"insertReadGroup",
"(",
"readGroup",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Inserts a the specified readGroupSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"readGroupSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L978-L1000 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeReferenceSet | def removeReferenceSet(self, referenceSet):
"""
Removes the specified referenceSet from this repository. This performs
a cascading removal of all references within this referenceSet.
However, it does not remove any of the ReadGroupSets or items that
refer to this ReferenceSet. These must be deleted before the
referenceSet can be removed.
"""
try:
q = models.Reference.delete().where(
models.Reference.referencesetid == referenceSet.getId())
q.execute()
q = models.Referenceset.delete().where(
models.Referenceset.id == referenceSet.getId())
q.execute()
except Exception:
msg = ("Unable to delete reference set. "
"There are objects currently in the registry which are "
"aligned against it. Remove these objects before removing "
"the reference set.")
raise exceptions.RepoManagerException(msg) | python | def removeReferenceSet(self, referenceSet):
"""
Removes the specified referenceSet from this repository. This performs
a cascading removal of all references within this referenceSet.
However, it does not remove any of the ReadGroupSets or items that
refer to this ReferenceSet. These must be deleted before the
referenceSet can be removed.
"""
try:
q = models.Reference.delete().where(
models.Reference.referencesetid == referenceSet.getId())
q.execute()
q = models.Referenceset.delete().where(
models.Referenceset.id == referenceSet.getId())
q.execute()
except Exception:
msg = ("Unable to delete reference set. "
"There are objects currently in the registry which are "
"aligned against it. Remove these objects before removing "
"the reference set.")
raise exceptions.RepoManagerException(msg) | [
"def",
"removeReferenceSet",
"(",
"self",
",",
"referenceSet",
")",
":",
"try",
":",
"q",
"=",
"models",
".",
"Reference",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Reference",
".",
"referencesetid",
"==",
"referenceSet",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")",
"q",
"=",
"models",
".",
"Referenceset",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Referenceset",
".",
"id",
"==",
"referenceSet",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")",
"except",
"Exception",
":",
"msg",
"=",
"(",
"\"Unable to delete reference set. \"",
"\"There are objects currently in the registry which are \"",
"\"aligned against it. Remove these objects before removing \"",
"\"the reference set.\"",
")",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"msg",
")"
] | Removes the specified referenceSet from this repository. This performs
a cascading removal of all references within this referenceSet.
However, it does not remove any of the ReadGroupSets or items that
refer to this ReferenceSet. These must be deleted before the
referenceSet can be removed. | [
"Removes",
"the",
"specified",
"referenceSet",
"from",
"this",
"repository",
".",
"This",
"performs",
"a",
"cascading",
"removal",
"of",
"all",
"references",
"within",
"this",
"referenceSet",
".",
"However",
"it",
"does",
"not",
"remove",
"any",
"of",
"the",
"ReadGroupSets",
"or",
"items",
"that",
"refer",
"to",
"this",
"ReferenceSet",
".",
"These",
"must",
"be",
"deleted",
"before",
"the",
"referenceSet",
"can",
"be",
"removed",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1002-L1022 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertVariantAnnotationSet | def insertVariantAnnotationSet(self, variantAnnotationSet):
"""
Inserts a the specified variantAnnotationSet into this repository.
"""
analysisJson = json.dumps(
protocol.toJsonDict(variantAnnotationSet.getAnalysis()))
try:
models.Variantannotationset.create(
id=variantAnnotationSet.getId(),
variantsetid=variantAnnotationSet.getParentContainer().getId(),
ontologyid=variantAnnotationSet.getOntology().getId(),
name=variantAnnotationSet.getLocalId(),
analysis=analysisJson,
annotationtype=variantAnnotationSet.getAnnotationType(),
created=variantAnnotationSet.getCreationTime(),
updated=variantAnnotationSet.getUpdatedTime(),
attributes=json.dumps(variantAnnotationSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertVariantAnnotationSet(self, variantAnnotationSet):
"""
Inserts a the specified variantAnnotationSet into this repository.
"""
analysisJson = json.dumps(
protocol.toJsonDict(variantAnnotationSet.getAnalysis()))
try:
models.Variantannotationset.create(
id=variantAnnotationSet.getId(),
variantsetid=variantAnnotationSet.getParentContainer().getId(),
ontologyid=variantAnnotationSet.getOntology().getId(),
name=variantAnnotationSet.getLocalId(),
analysis=analysisJson,
annotationtype=variantAnnotationSet.getAnnotationType(),
created=variantAnnotationSet.getCreationTime(),
updated=variantAnnotationSet.getUpdatedTime(),
attributes=json.dumps(variantAnnotationSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertVariantAnnotationSet",
"(",
"self",
",",
"variantAnnotationSet",
")",
":",
"analysisJson",
"=",
"json",
".",
"dumps",
"(",
"protocol",
".",
"toJsonDict",
"(",
"variantAnnotationSet",
".",
"getAnalysis",
"(",
")",
")",
")",
"try",
":",
"models",
".",
"Variantannotationset",
".",
"create",
"(",
"id",
"=",
"variantAnnotationSet",
".",
"getId",
"(",
")",
",",
"variantsetid",
"=",
"variantAnnotationSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"ontologyid",
"=",
"variantAnnotationSet",
".",
"getOntology",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"variantAnnotationSet",
".",
"getLocalId",
"(",
")",
",",
"analysis",
"=",
"analysisJson",
",",
"annotationtype",
"=",
"variantAnnotationSet",
".",
"getAnnotationType",
"(",
")",
",",
"created",
"=",
"variantAnnotationSet",
".",
"getCreationTime",
"(",
")",
",",
"updated",
"=",
"variantAnnotationSet",
".",
"getUpdatedTime",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"variantAnnotationSet",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Inserts a the specified variantAnnotationSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"variantAnnotationSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1040-L1058 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertCallSet | def insertCallSet(self, callSet):
"""
Inserts a the specified callSet into this repository.
"""
try:
models.Callset.create(
id=callSet.getId(),
name=callSet.getLocalId(),
variantsetid=callSet.getParentContainer().getId(),
biosampleid=callSet.getBiosampleId(),
attributes=json.dumps(callSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertCallSet(self, callSet):
"""
Inserts a the specified callSet into this repository.
"""
try:
models.Callset.create(
id=callSet.getId(),
name=callSet.getLocalId(),
variantsetid=callSet.getParentContainer().getId(),
biosampleid=callSet.getBiosampleId(),
attributes=json.dumps(callSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertCallSet",
"(",
"self",
",",
"callSet",
")",
":",
"try",
":",
"models",
".",
"Callset",
".",
"create",
"(",
"id",
"=",
"callSet",
".",
"getId",
"(",
")",
",",
"name",
"=",
"callSet",
".",
"getLocalId",
"(",
")",
",",
"variantsetid",
"=",
"callSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"biosampleid",
"=",
"callSet",
".",
"getBiosampleId",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"callSet",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Inserts a the specified callSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"callSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1076-L1088 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertVariantSet | def insertVariantSet(self, variantSet):
"""
Inserts a the specified variantSet into this repository.
"""
# We cheat a little here with the VariantSetMetadata, and encode these
# within the table as a JSON dump. These should really be stored in
# their own table
metadataJson = json.dumps(
[protocol.toJsonDict(metadata) for metadata in
variantSet.getMetadata()])
urlMapJson = json.dumps(variantSet.getReferenceToDataUrlIndexMap())
try:
models.Variantset.create(
id=variantSet.getId(),
datasetid=variantSet.getParentContainer().getId(),
referencesetid=variantSet.getReferenceSet().getId(),
name=variantSet.getLocalId(),
created=datetime.datetime.now(),
updated=datetime.datetime.now(),
metadata=metadataJson,
dataurlindexmap=urlMapJson,
attributes=json.dumps(variantSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e)
for callSet in variantSet.getCallSets():
self.insertCallSet(callSet) | python | def insertVariantSet(self, variantSet):
"""
Inserts a the specified variantSet into this repository.
"""
# We cheat a little here with the VariantSetMetadata, and encode these
# within the table as a JSON dump. These should really be stored in
# their own table
metadataJson = json.dumps(
[protocol.toJsonDict(metadata) for metadata in
variantSet.getMetadata()])
urlMapJson = json.dumps(variantSet.getReferenceToDataUrlIndexMap())
try:
models.Variantset.create(
id=variantSet.getId(),
datasetid=variantSet.getParentContainer().getId(),
referencesetid=variantSet.getReferenceSet().getId(),
name=variantSet.getLocalId(),
created=datetime.datetime.now(),
updated=datetime.datetime.now(),
metadata=metadataJson,
dataurlindexmap=urlMapJson,
attributes=json.dumps(variantSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e)
for callSet in variantSet.getCallSets():
self.insertCallSet(callSet) | [
"def",
"insertVariantSet",
"(",
"self",
",",
"variantSet",
")",
":",
"# We cheat a little here with the VariantSetMetadata, and encode these",
"# within the table as a JSON dump. These should really be stored in",
"# their own table",
"metadataJson",
"=",
"json",
".",
"dumps",
"(",
"[",
"protocol",
".",
"toJsonDict",
"(",
"metadata",
")",
"for",
"metadata",
"in",
"variantSet",
".",
"getMetadata",
"(",
")",
"]",
")",
"urlMapJson",
"=",
"json",
".",
"dumps",
"(",
"variantSet",
".",
"getReferenceToDataUrlIndexMap",
"(",
")",
")",
"try",
":",
"models",
".",
"Variantset",
".",
"create",
"(",
"id",
"=",
"variantSet",
".",
"getId",
"(",
")",
",",
"datasetid",
"=",
"variantSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"referencesetid",
"=",
"variantSet",
".",
"getReferenceSet",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"variantSet",
".",
"getLocalId",
"(",
")",
",",
"created",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
",",
"updated",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
",",
"metadata",
"=",
"metadataJson",
",",
"dataurlindexmap",
"=",
"urlMapJson",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"variantSet",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")",
"for",
"callSet",
"in",
"variantSet",
".",
"getCallSets",
"(",
")",
":",
"self",
".",
"insertCallSet",
"(",
"callSet",
")"
] | Inserts a the specified variantSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"variantSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1102-L1127 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertFeatureSet | def insertFeatureSet(self, featureSet):
"""
Inserts a the specified featureSet into this repository.
"""
# TODO add support for info and sourceUri fields.
try:
models.Featureset.create(
id=featureSet.getId(),
datasetid=featureSet.getParentContainer().getId(),
referencesetid=featureSet.getReferenceSet().getId(),
ontologyid=featureSet.getOntology().getId(),
name=featureSet.getLocalId(),
dataurl=featureSet.getDataUrl(),
attributes=json.dumps(featureSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertFeatureSet(self, featureSet):
"""
Inserts a the specified featureSet into this repository.
"""
# TODO add support for info and sourceUri fields.
try:
models.Featureset.create(
id=featureSet.getId(),
datasetid=featureSet.getParentContainer().getId(),
referencesetid=featureSet.getReferenceSet().getId(),
ontologyid=featureSet.getOntology().getId(),
name=featureSet.getLocalId(),
dataurl=featureSet.getDataUrl(),
attributes=json.dumps(featureSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertFeatureSet",
"(",
"self",
",",
"featureSet",
")",
":",
"# TODO add support for info and sourceUri fields.",
"try",
":",
"models",
".",
"Featureset",
".",
"create",
"(",
"id",
"=",
"featureSet",
".",
"getId",
"(",
")",
",",
"datasetid",
"=",
"featureSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"referencesetid",
"=",
"featureSet",
".",
"getReferenceSet",
"(",
")",
".",
"getId",
"(",
")",
",",
"ontologyid",
"=",
"featureSet",
".",
"getOntology",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"featureSet",
".",
"getLocalId",
"(",
")",
",",
"dataurl",
"=",
"featureSet",
".",
"getDataUrl",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"featureSet",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Inserts a the specified featureSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"featureSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1145-L1160 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertContinuousSet | def insertContinuousSet(self, continuousSet):
"""
Inserts a the specified continuousSet into this repository.
"""
# TODO add support for info and sourceUri fields.
try:
models.ContinuousSet.create(
id=continuousSet.getId(),
datasetid=continuousSet.getParentContainer().getId(),
referencesetid=continuousSet.getReferenceSet().getId(),
name=continuousSet.getLocalId(),
dataurl=continuousSet.getDataUrl(),
attributes=json.dumps(continuousSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertContinuousSet(self, continuousSet):
"""
Inserts a the specified continuousSet into this repository.
"""
# TODO add support for info and sourceUri fields.
try:
models.ContinuousSet.create(
id=continuousSet.getId(),
datasetid=continuousSet.getParentContainer().getId(),
referencesetid=continuousSet.getReferenceSet().getId(),
name=continuousSet.getLocalId(),
dataurl=continuousSet.getDataUrl(),
attributes=json.dumps(continuousSet.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertContinuousSet",
"(",
"self",
",",
"continuousSet",
")",
":",
"# TODO add support for info and sourceUri fields.",
"try",
":",
"models",
".",
"ContinuousSet",
".",
"create",
"(",
"id",
"=",
"continuousSet",
".",
"getId",
"(",
")",
",",
"datasetid",
"=",
"continuousSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"referencesetid",
"=",
"continuousSet",
".",
"getReferenceSet",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"continuousSet",
".",
"getLocalId",
"(",
")",
",",
"dataurl",
"=",
"continuousSet",
".",
"getDataUrl",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"continuousSet",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Inserts a the specified continuousSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"continuousSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1186-L1200 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertBiosample | def insertBiosample(self, biosample):
"""
Inserts the specified Biosample into this repository.
"""
try:
models.Biosample.create(
id=biosample.getId(),
datasetid=biosample.getParentContainer().getId(),
name=biosample.getLocalId(),
description=biosample.getDescription(),
disease=json.dumps(biosample.getDisease()),
created=biosample.getCreated(),
updated=biosample.getUpdated(),
individualid=biosample.getIndividualId(),
attributes=json.dumps(biosample.getAttributes()),
individualAgeAtCollection=json.dumps(
biosample.getIndividualAgeAtCollection()))
except Exception:
raise exceptions.DuplicateNameException(
biosample.getLocalId(),
biosample.getParentContainer().getLocalId()) | python | def insertBiosample(self, biosample):
"""
Inserts the specified Biosample into this repository.
"""
try:
models.Biosample.create(
id=biosample.getId(),
datasetid=biosample.getParentContainer().getId(),
name=biosample.getLocalId(),
description=biosample.getDescription(),
disease=json.dumps(biosample.getDisease()),
created=biosample.getCreated(),
updated=biosample.getUpdated(),
individualid=biosample.getIndividualId(),
attributes=json.dumps(biosample.getAttributes()),
individualAgeAtCollection=json.dumps(
biosample.getIndividualAgeAtCollection()))
except Exception:
raise exceptions.DuplicateNameException(
biosample.getLocalId(),
biosample.getParentContainer().getLocalId()) | [
"def",
"insertBiosample",
"(",
"self",
",",
"biosample",
")",
":",
"try",
":",
"models",
".",
"Biosample",
".",
"create",
"(",
"id",
"=",
"biosample",
".",
"getId",
"(",
")",
",",
"datasetid",
"=",
"biosample",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"biosample",
".",
"getLocalId",
"(",
")",
",",
"description",
"=",
"biosample",
".",
"getDescription",
"(",
")",
",",
"disease",
"=",
"json",
".",
"dumps",
"(",
"biosample",
".",
"getDisease",
"(",
")",
")",
",",
"created",
"=",
"biosample",
".",
"getCreated",
"(",
")",
",",
"updated",
"=",
"biosample",
".",
"getUpdated",
"(",
")",
",",
"individualid",
"=",
"biosample",
".",
"getIndividualId",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"biosample",
".",
"getAttributes",
"(",
")",
")",
",",
"individualAgeAtCollection",
"=",
"json",
".",
"dumps",
"(",
"biosample",
".",
"getIndividualAgeAtCollection",
"(",
")",
")",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"biosample",
".",
"getLocalId",
"(",
")",
",",
"biosample",
".",
"getParentContainer",
"(",
")",
".",
"getLocalId",
"(",
")",
")"
] | Inserts the specified Biosample into this repository. | [
"Inserts",
"the",
"specified",
"Biosample",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1217-L1237 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertIndividual | def insertIndividual(self, individual):
"""
Inserts the specified individual into this repository.
"""
try:
models.Individual.create(
id=individual.getId(),
datasetId=individual.getParentContainer().getId(),
name=individual.getLocalId(),
description=individual.getDescription(),
created=individual.getCreated(),
updated=individual.getUpdated(),
species=json.dumps(individual.getSpecies()),
sex=json.dumps(individual.getSex()),
attributes=json.dumps(individual.getAttributes()))
except Exception:
raise exceptions.DuplicateNameException(
individual.getLocalId(),
individual.getParentContainer().getLocalId()) | python | def insertIndividual(self, individual):
"""
Inserts the specified individual into this repository.
"""
try:
models.Individual.create(
id=individual.getId(),
datasetId=individual.getParentContainer().getId(),
name=individual.getLocalId(),
description=individual.getDescription(),
created=individual.getCreated(),
updated=individual.getUpdated(),
species=json.dumps(individual.getSpecies()),
sex=json.dumps(individual.getSex()),
attributes=json.dumps(individual.getAttributes()))
except Exception:
raise exceptions.DuplicateNameException(
individual.getLocalId(),
individual.getParentContainer().getLocalId()) | [
"def",
"insertIndividual",
"(",
"self",
",",
"individual",
")",
":",
"try",
":",
"models",
".",
"Individual",
".",
"create",
"(",
"id",
"=",
"individual",
".",
"getId",
"(",
")",
",",
"datasetId",
"=",
"individual",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"individual",
".",
"getLocalId",
"(",
")",
",",
"description",
"=",
"individual",
".",
"getDescription",
"(",
")",
",",
"created",
"=",
"individual",
".",
"getCreated",
"(",
")",
",",
"updated",
"=",
"individual",
".",
"getUpdated",
"(",
")",
",",
"species",
"=",
"json",
".",
"dumps",
"(",
"individual",
".",
"getSpecies",
"(",
")",
")",
",",
"sex",
"=",
"json",
".",
"dumps",
"(",
"individual",
".",
"getSex",
"(",
")",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"individual",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"individual",
".",
"getLocalId",
"(",
")",
",",
"individual",
".",
"getParentContainer",
"(",
")",
".",
"getLocalId",
"(",
")",
")"
] | Inserts the specified individual into this repository. | [
"Inserts",
"the",
"specified",
"individual",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1251-L1269 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertPhenotypeAssociationSet | def insertPhenotypeAssociationSet(self, phenotypeAssociationSet):
"""
Inserts the specified phenotype annotation set into this repository.
"""
datasetId = phenotypeAssociationSet.getParentContainer().getId()
attributes = json.dumps(phenotypeAssociationSet.getAttributes())
try:
models.Phenotypeassociationset.create(
id=phenotypeAssociationSet.getId(),
name=phenotypeAssociationSet.getLocalId(),
datasetid=datasetId,
dataurl=phenotypeAssociationSet._dataUrl,
attributes=attributes)
except Exception:
raise exceptions.DuplicateNameException(
phenotypeAssociationSet.getParentContainer().getId()) | python | def insertPhenotypeAssociationSet(self, phenotypeAssociationSet):
"""
Inserts the specified phenotype annotation set into this repository.
"""
datasetId = phenotypeAssociationSet.getParentContainer().getId()
attributes = json.dumps(phenotypeAssociationSet.getAttributes())
try:
models.Phenotypeassociationset.create(
id=phenotypeAssociationSet.getId(),
name=phenotypeAssociationSet.getLocalId(),
datasetid=datasetId,
dataurl=phenotypeAssociationSet._dataUrl,
attributes=attributes)
except Exception:
raise exceptions.DuplicateNameException(
phenotypeAssociationSet.getParentContainer().getId()) | [
"def",
"insertPhenotypeAssociationSet",
"(",
"self",
",",
"phenotypeAssociationSet",
")",
":",
"datasetId",
"=",
"phenotypeAssociationSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"phenotypeAssociationSet",
".",
"getAttributes",
"(",
")",
")",
"try",
":",
"models",
".",
"Phenotypeassociationset",
".",
"create",
"(",
"id",
"=",
"phenotypeAssociationSet",
".",
"getId",
"(",
")",
",",
"name",
"=",
"phenotypeAssociationSet",
".",
"getLocalId",
"(",
")",
",",
"datasetid",
"=",
"datasetId",
",",
"dataurl",
"=",
"phenotypeAssociationSet",
".",
"_dataUrl",
",",
"attributes",
"=",
"attributes",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"phenotypeAssociationSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
")"
] | Inserts the specified phenotype annotation set into this repository. | [
"Inserts",
"the",
"specified",
"phenotype",
"annotation",
"set",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1286-L1301 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertRnaQuantificationSet | def insertRnaQuantificationSet(self, rnaQuantificationSet):
"""
Inserts a the specified rnaQuantificationSet into this repository.
"""
try:
models.Rnaquantificationset.create(
id=rnaQuantificationSet.getId(),
datasetid=rnaQuantificationSet.getParentContainer().getId(),
referencesetid=rnaQuantificationSet.getReferenceSet().getId(),
name=rnaQuantificationSet.getLocalId(),
dataurl=rnaQuantificationSet.getDataUrl(),
attributes=json.dumps(rnaQuantificationSet.getAttributes()))
except Exception:
raise exceptions.DuplicateNameException(
rnaQuantificationSet.getLocalId(),
rnaQuantificationSet.getParentContainer().getLocalId()) | python | def insertRnaQuantificationSet(self, rnaQuantificationSet):
"""
Inserts a the specified rnaQuantificationSet into this repository.
"""
try:
models.Rnaquantificationset.create(
id=rnaQuantificationSet.getId(),
datasetid=rnaQuantificationSet.getParentContainer().getId(),
referencesetid=rnaQuantificationSet.getReferenceSet().getId(),
name=rnaQuantificationSet.getLocalId(),
dataurl=rnaQuantificationSet.getDataUrl(),
attributes=json.dumps(rnaQuantificationSet.getAttributes()))
except Exception:
raise exceptions.DuplicateNameException(
rnaQuantificationSet.getLocalId(),
rnaQuantificationSet.getParentContainer().getLocalId()) | [
"def",
"insertRnaQuantificationSet",
"(",
"self",
",",
"rnaQuantificationSet",
")",
":",
"try",
":",
"models",
".",
"Rnaquantificationset",
".",
"create",
"(",
"id",
"=",
"rnaQuantificationSet",
".",
"getId",
"(",
")",
",",
"datasetid",
"=",
"rnaQuantificationSet",
".",
"getParentContainer",
"(",
")",
".",
"getId",
"(",
")",
",",
"referencesetid",
"=",
"rnaQuantificationSet",
".",
"getReferenceSet",
"(",
")",
".",
"getId",
"(",
")",
",",
"name",
"=",
"rnaQuantificationSet",
".",
"getLocalId",
"(",
")",
",",
"dataurl",
"=",
"rnaQuantificationSet",
".",
"getDataUrl",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"rnaQuantificationSet",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
":",
"raise",
"exceptions",
".",
"DuplicateNameException",
"(",
"rnaQuantificationSet",
".",
"getLocalId",
"(",
")",
",",
"rnaQuantificationSet",
".",
"getParentContainer",
"(",
")",
".",
"getLocalId",
"(",
")",
")"
] | Inserts a the specified rnaQuantificationSet into this repository. | [
"Inserts",
"a",
"the",
"specified",
"rnaQuantificationSet",
"into",
"this",
"repository",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1313-L1328 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removeRnaQuantificationSet | def removeRnaQuantificationSet(self, rnaQuantificationSet):
"""
Removes the specified rnaQuantificationSet from this repository. This
performs a cascading removal of all items within this
rnaQuantificationSet.
"""
q = models.Rnaquantificationset.delete().where(
models.Rnaquantificationset.id == rnaQuantificationSet.getId())
q.execute() | python | def removeRnaQuantificationSet(self, rnaQuantificationSet):
"""
Removes the specified rnaQuantificationSet from this repository. This
performs a cascading removal of all items within this
rnaQuantificationSet.
"""
q = models.Rnaquantificationset.delete().where(
models.Rnaquantificationset.id == rnaQuantificationSet.getId())
q.execute() | [
"def",
"removeRnaQuantificationSet",
"(",
"self",
",",
"rnaQuantificationSet",
")",
":",
"q",
"=",
"models",
".",
"Rnaquantificationset",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Rnaquantificationset",
".",
"id",
"==",
"rnaQuantificationSet",
".",
"getId",
"(",
")",
")",
"q",
".",
"execute",
"(",
")"
] | Removes the specified rnaQuantificationSet from this repository. This
performs a cascading removal of all items within this
rnaQuantificationSet. | [
"Removes",
"the",
"specified",
"rnaQuantificationSet",
"from",
"this",
"repository",
".",
"This",
"performs",
"a",
"cascading",
"removal",
"of",
"all",
"items",
"within",
"this",
"rnaQuantificationSet",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1343-L1351 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.insertPeer | def insertPeer(self, peer):
"""
Accepts a peer datamodel object and adds it to the registry.
"""
try:
models.Peer.create(
url=peer.getUrl(),
attributes=json.dumps(peer.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | python | def insertPeer(self, peer):
"""
Accepts a peer datamodel object and adds it to the registry.
"""
try:
models.Peer.create(
url=peer.getUrl(),
attributes=json.dumps(peer.getAttributes()))
except Exception as e:
raise exceptions.RepoManagerException(e) | [
"def",
"insertPeer",
"(",
"self",
",",
"peer",
")",
":",
"try",
":",
"models",
".",
"Peer",
".",
"create",
"(",
"url",
"=",
"peer",
".",
"getUrl",
"(",
")",
",",
"attributes",
"=",
"json",
".",
"dumps",
"(",
"peer",
".",
"getAttributes",
"(",
")",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"exceptions",
".",
"RepoManagerException",
"(",
"e",
")"
] | Accepts a peer datamodel object and adds it to the registry. | [
"Accepts",
"a",
"peer",
"datamodel",
"object",
"and",
"adds",
"it",
"to",
"the",
"registry",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1353-L1362 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.removePeer | def removePeer(self, url):
"""
Remove peers by URL.
"""
q = models.Peer.delete().where(
models.Peer.url == url)
q.execute() | python | def removePeer(self, url):
"""
Remove peers by URL.
"""
q = models.Peer.delete().where(
models.Peer.url == url)
q.execute() | [
"def",
"removePeer",
"(",
"self",
",",
"url",
")",
":",
"q",
"=",
"models",
".",
"Peer",
".",
"delete",
"(",
")",
".",
"where",
"(",
"models",
".",
"Peer",
".",
"url",
"==",
"url",
")",
"q",
".",
"execute",
"(",
")"
] | Remove peers by URL. | [
"Remove",
"peers",
"by",
"URL",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1364-L1370 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.initialise | def initialise(self):
"""
Initialise this data repository, creating any necessary directories
and file paths.
"""
self._checkWriteMode()
self._createSystemTable()
self._createNetworkTables()
self._createOntologyTable()
self._createReferenceSetTable()
self._createReferenceTable()
self._createDatasetTable()
self._createReadGroupSetTable()
self._createReadGroupTable()
self._createCallSetTable()
self._createVariantSetTable()
self._createVariantAnnotationSetTable()
self._createFeatureSetTable()
self._createContinuousSetTable()
self._createBiosampleTable()
self._createIndividualTable()
self._createPhenotypeAssociationSetTable()
self._createRnaQuantificationSetTable() | python | def initialise(self):
"""
Initialise this data repository, creating any necessary directories
and file paths.
"""
self._checkWriteMode()
self._createSystemTable()
self._createNetworkTables()
self._createOntologyTable()
self._createReferenceSetTable()
self._createReferenceTable()
self._createDatasetTable()
self._createReadGroupSetTable()
self._createReadGroupTable()
self._createCallSetTable()
self._createVariantSetTable()
self._createVariantAnnotationSetTable()
self._createFeatureSetTable()
self._createContinuousSetTable()
self._createBiosampleTable()
self._createIndividualTable()
self._createPhenotypeAssociationSetTable()
self._createRnaQuantificationSetTable() | [
"def",
"initialise",
"(",
"self",
")",
":",
"self",
".",
"_checkWriteMode",
"(",
")",
"self",
".",
"_createSystemTable",
"(",
")",
"self",
".",
"_createNetworkTables",
"(",
")",
"self",
".",
"_createOntologyTable",
"(",
")",
"self",
".",
"_createReferenceSetTable",
"(",
")",
"self",
".",
"_createReferenceTable",
"(",
")",
"self",
".",
"_createDatasetTable",
"(",
")",
"self",
".",
"_createReadGroupSetTable",
"(",
")",
"self",
".",
"_createReadGroupTable",
"(",
")",
"self",
".",
"_createCallSetTable",
"(",
")",
"self",
".",
"_createVariantSetTable",
"(",
")",
"self",
".",
"_createVariantAnnotationSetTable",
"(",
")",
"self",
".",
"_createFeatureSetTable",
"(",
")",
"self",
".",
"_createContinuousSetTable",
"(",
")",
"self",
".",
"_createBiosampleTable",
"(",
")",
"self",
".",
"_createIndividualTable",
"(",
")",
"self",
".",
"_createPhenotypeAssociationSetTable",
"(",
")",
"self",
".",
"_createRnaQuantificationSetTable",
"(",
")"
] | Initialise this data repository, creating any necessary directories
and file paths. | [
"Initialise",
"this",
"data",
"repository",
"creating",
"any",
"necessary",
"directories",
"and",
"file",
"paths",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1377-L1399 | train |
ga4gh/ga4gh-server | ga4gh/server/datarepo.py | SqlDataRepository.load | def load(self):
"""
Loads this data repository into memory.
"""
self._readSystemTable()
self._readOntologyTable()
self._readReferenceSetTable()
self._readReferenceTable()
self._readDatasetTable()
self._readReadGroupSetTable()
self._readReadGroupTable()
self._readVariantSetTable()
self._readCallSetTable()
self._readVariantAnnotationSetTable()
self._readFeatureSetTable()
self._readContinuousSetTable()
self._readBiosampleTable()
self._readIndividualTable()
self._readPhenotypeAssociationSetTable()
self._readRnaQuantificationSetTable() | python | def load(self):
"""
Loads this data repository into memory.
"""
self._readSystemTable()
self._readOntologyTable()
self._readReferenceSetTable()
self._readReferenceTable()
self._readDatasetTable()
self._readReadGroupSetTable()
self._readReadGroupTable()
self._readVariantSetTable()
self._readCallSetTable()
self._readVariantAnnotationSetTable()
self._readFeatureSetTable()
self._readContinuousSetTable()
self._readBiosampleTable()
self._readIndividualTable()
self._readPhenotypeAssociationSetTable()
self._readRnaQuantificationSetTable() | [
"def",
"load",
"(",
"self",
")",
":",
"self",
".",
"_readSystemTable",
"(",
")",
"self",
".",
"_readOntologyTable",
"(",
")",
"self",
".",
"_readReferenceSetTable",
"(",
")",
"self",
".",
"_readReferenceTable",
"(",
")",
"self",
".",
"_readDatasetTable",
"(",
")",
"self",
".",
"_readReadGroupSetTable",
"(",
")",
"self",
".",
"_readReadGroupTable",
"(",
")",
"self",
".",
"_readVariantSetTable",
"(",
")",
"self",
".",
"_readCallSetTable",
"(",
")",
"self",
".",
"_readVariantAnnotationSetTable",
"(",
")",
"self",
".",
"_readFeatureSetTable",
"(",
")",
"self",
".",
"_readContinuousSetTable",
"(",
")",
"self",
".",
"_readBiosampleTable",
"(",
")",
"self",
".",
"_readIndividualTable",
"(",
")",
"self",
".",
"_readPhenotypeAssociationSetTable",
"(",
")",
"self",
".",
"_readRnaQuantificationSetTable",
"(",
")"
] | Loads this data repository into memory. | [
"Loads",
"this",
"data",
"repository",
"into",
"memory",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1421-L1440 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet.populateFromRow | def populateFromRow(self, featureSetRecord):
"""
Populates the instance variables of this FeatureSet from the specified
DB row.
"""
self._dbFilePath = featureSetRecord.dataurl
self.setAttributesJson(featureSetRecord.attributes)
self.populateFromFile(self._dbFilePath) | python | def populateFromRow(self, featureSetRecord):
"""
Populates the instance variables of this FeatureSet from the specified
DB row.
"""
self._dbFilePath = featureSetRecord.dataurl
self.setAttributesJson(featureSetRecord.attributes)
self.populateFromFile(self._dbFilePath) | [
"def",
"populateFromRow",
"(",
"self",
",",
"featureSetRecord",
")",
":",
"self",
".",
"_dbFilePath",
"=",
"featureSetRecord",
".",
"dataurl",
"self",
".",
"setAttributesJson",
"(",
"featureSetRecord",
".",
"attributes",
")",
"self",
".",
"populateFromFile",
"(",
"self",
".",
"_dbFilePath",
")"
] | Populates the instance variables of this FeatureSet from the specified
DB row. | [
"Populates",
"the",
"instance",
"variables",
"of",
"this",
"FeatureSet",
"from",
"the",
"specified",
"DB",
"row",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L48-L55 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet.populateFromFile | def populateFromFile(self, dataUrl):
"""
Populates the instance variables of this FeatureSet from the specified
data URL.
Initialize dataset, using the passed dict of sources
[{source,format}] see rdflib.parse() for more
If path is set, this backend will load itself
"""
self._dbFilePath = dataUrl
# initialize graph
self._rdfGraph = rdflib.ConjunctiveGraph()
# save the path
self._dataUrl = dataUrl
self._scanDataFiles(self._dataUrl, ['*.ttl'])
# extract version
cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl")
versionInfo = rdflib.URIRef(
u'http://www.w3.org/2002/07/owl#versionInfo')
self._version = None
for _, _, obj in self._rdfGraph.triples((cgdTTL, versionInfo, None)):
self._version = obj.toPython()
# setup location cache
self._initializeLocationCache() | python | def populateFromFile(self, dataUrl):
"""
Populates the instance variables of this FeatureSet from the specified
data URL.
Initialize dataset, using the passed dict of sources
[{source,format}] see rdflib.parse() for more
If path is set, this backend will load itself
"""
self._dbFilePath = dataUrl
# initialize graph
self._rdfGraph = rdflib.ConjunctiveGraph()
# save the path
self._dataUrl = dataUrl
self._scanDataFiles(self._dataUrl, ['*.ttl'])
# extract version
cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl")
versionInfo = rdflib.URIRef(
u'http://www.w3.org/2002/07/owl#versionInfo')
self._version = None
for _, _, obj in self._rdfGraph.triples((cgdTTL, versionInfo, None)):
self._version = obj.toPython()
# setup location cache
self._initializeLocationCache() | [
"def",
"populateFromFile",
"(",
"self",
",",
"dataUrl",
")",
":",
"self",
".",
"_dbFilePath",
"=",
"dataUrl",
"# initialize graph",
"self",
".",
"_rdfGraph",
"=",
"rdflib",
".",
"ConjunctiveGraph",
"(",
")",
"# save the path",
"self",
".",
"_dataUrl",
"=",
"dataUrl",
"self",
".",
"_scanDataFiles",
"(",
"self",
".",
"_dataUrl",
",",
"[",
"'*.ttl'",
"]",
")",
"# extract version",
"cgdTTL",
"=",
"rdflib",
".",
"URIRef",
"(",
"\"http://data.monarchinitiative.org/ttl/cgd.ttl\"",
")",
"versionInfo",
"=",
"rdflib",
".",
"URIRef",
"(",
"u'http://www.w3.org/2002/07/owl#versionInfo'",
")",
"self",
".",
"_version",
"=",
"None",
"for",
"_",
",",
"_",
",",
"obj",
"in",
"self",
".",
"_rdfGraph",
".",
"triples",
"(",
"(",
"cgdTTL",
",",
"versionInfo",
",",
"None",
")",
")",
":",
"self",
".",
"_version",
"=",
"obj",
".",
"toPython",
"(",
")",
"# setup location cache",
"self",
".",
"_initializeLocationCache",
"(",
")"
] | Populates the instance variables of this FeatureSet from the specified
data URL.
Initialize dataset, using the passed dict of sources
[{source,format}] see rdflib.parse() for more
If path is set, this backend will load itself | [
"Populates",
"the",
"instance",
"variables",
"of",
"this",
"FeatureSet",
"from",
"the",
"specified",
"data",
"URL",
".",
"Initialize",
"dataset",
"using",
"the",
"passed",
"dict",
"of",
"sources",
"[",
"{",
"source",
"format",
"}",
"]",
"see",
"rdflib",
".",
"parse",
"()",
"for",
"more",
"If",
"path",
"is",
"set",
"this",
"backend",
"will",
"load",
"itself"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L57-L81 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet.getFeature | def getFeature(self, compoundId):
"""
find a feature and return ga4gh representation, use compoundId as
featureId
"""
feature = self._getFeatureById(compoundId.featureId)
feature.id = str(compoundId)
return feature | python | def getFeature(self, compoundId):
"""
find a feature and return ga4gh representation, use compoundId as
featureId
"""
feature = self._getFeatureById(compoundId.featureId)
feature.id = str(compoundId)
return feature | [
"def",
"getFeature",
"(",
"self",
",",
"compoundId",
")",
":",
"feature",
"=",
"self",
".",
"_getFeatureById",
"(",
"compoundId",
".",
"featureId",
")",
"feature",
".",
"id",
"=",
"str",
"(",
"compoundId",
")",
"return",
"feature"
] | find a feature and return ga4gh representation, use compoundId as
featureId | [
"find",
"a",
"feature",
"and",
"return",
"ga4gh",
"representation",
"use",
"compoundId",
"as",
"featureId"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L84-L91 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet._getFeatureById | def _getFeatureById(self, featureId):
"""
find a feature and return ga4gh representation, use 'native' id as
featureId
"""
featureRef = rdflib.URIRef(featureId)
featureDetails = self._detailTuples([featureRef])
feature = {}
for detail in featureDetails:
feature[detail['predicate']] = []
for detail in featureDetails:
feature[detail['predicate']].append(detail['object'])
pbFeature = protocol.Feature()
term = protocol.OntologyTerm()
# Schema for feature only supports one type of `type`
# here we default to first OBO defined
for featureType in sorted(feature[TYPE]):
if "obolibrary" in featureType:
term.term = self._featureTypeLabel(featureType)
term.term_id = featureType
pbFeature.feature_type.MergeFrom(term)
break
pbFeature.id = featureId
# Schema for feature only supports one type of `name` `symbol`
# here we default to shortest for symbol and longest for name
feature[LABEL].sort(key=len)
pbFeature.gene_symbol = feature[LABEL][0]
pbFeature.name = feature[LABEL][-1]
pbFeature.attributes.MergeFrom(protocol.Attributes())
for key in feature:
for val in sorted(feature[key]):
pbFeature.attributes.attr[key].values.add().string_value = val
if featureId in self._locationMap:
location = self._locationMap[featureId]
pbFeature.reference_name = location["chromosome"]
pbFeature.start = location["begin"]
pbFeature.end = location["end"]
return pbFeature | python | def _getFeatureById(self, featureId):
"""
find a feature and return ga4gh representation, use 'native' id as
featureId
"""
featureRef = rdflib.URIRef(featureId)
featureDetails = self._detailTuples([featureRef])
feature = {}
for detail in featureDetails:
feature[detail['predicate']] = []
for detail in featureDetails:
feature[detail['predicate']].append(detail['object'])
pbFeature = protocol.Feature()
term = protocol.OntologyTerm()
# Schema for feature only supports one type of `type`
# here we default to first OBO defined
for featureType in sorted(feature[TYPE]):
if "obolibrary" in featureType:
term.term = self._featureTypeLabel(featureType)
term.term_id = featureType
pbFeature.feature_type.MergeFrom(term)
break
pbFeature.id = featureId
# Schema for feature only supports one type of `name` `symbol`
# here we default to shortest for symbol and longest for name
feature[LABEL].sort(key=len)
pbFeature.gene_symbol = feature[LABEL][0]
pbFeature.name = feature[LABEL][-1]
pbFeature.attributes.MergeFrom(protocol.Attributes())
for key in feature:
for val in sorted(feature[key]):
pbFeature.attributes.attr[key].values.add().string_value = val
if featureId in self._locationMap:
location = self._locationMap[featureId]
pbFeature.reference_name = location["chromosome"]
pbFeature.start = location["begin"]
pbFeature.end = location["end"]
return pbFeature | [
"def",
"_getFeatureById",
"(",
"self",
",",
"featureId",
")",
":",
"featureRef",
"=",
"rdflib",
".",
"URIRef",
"(",
"featureId",
")",
"featureDetails",
"=",
"self",
".",
"_detailTuples",
"(",
"[",
"featureRef",
"]",
")",
"feature",
"=",
"{",
"}",
"for",
"detail",
"in",
"featureDetails",
":",
"feature",
"[",
"detail",
"[",
"'predicate'",
"]",
"]",
"=",
"[",
"]",
"for",
"detail",
"in",
"featureDetails",
":",
"feature",
"[",
"detail",
"[",
"'predicate'",
"]",
"]",
".",
"append",
"(",
"detail",
"[",
"'object'",
"]",
")",
"pbFeature",
"=",
"protocol",
".",
"Feature",
"(",
")",
"term",
"=",
"protocol",
".",
"OntologyTerm",
"(",
")",
"# Schema for feature only supports one type of `type`",
"# here we default to first OBO defined",
"for",
"featureType",
"in",
"sorted",
"(",
"feature",
"[",
"TYPE",
"]",
")",
":",
"if",
"\"obolibrary\"",
"in",
"featureType",
":",
"term",
".",
"term",
"=",
"self",
".",
"_featureTypeLabel",
"(",
"featureType",
")",
"term",
".",
"term_id",
"=",
"featureType",
"pbFeature",
".",
"feature_type",
".",
"MergeFrom",
"(",
"term",
")",
"break",
"pbFeature",
".",
"id",
"=",
"featureId",
"# Schema for feature only supports one type of `name` `symbol`",
"# here we default to shortest for symbol and longest for name",
"feature",
"[",
"LABEL",
"]",
".",
"sort",
"(",
"key",
"=",
"len",
")",
"pbFeature",
".",
"gene_symbol",
"=",
"feature",
"[",
"LABEL",
"]",
"[",
"0",
"]",
"pbFeature",
".",
"name",
"=",
"feature",
"[",
"LABEL",
"]",
"[",
"-",
"1",
"]",
"pbFeature",
".",
"attributes",
".",
"MergeFrom",
"(",
"protocol",
".",
"Attributes",
"(",
")",
")",
"for",
"key",
"in",
"feature",
":",
"for",
"val",
"in",
"sorted",
"(",
"feature",
"[",
"key",
"]",
")",
":",
"pbFeature",
".",
"attributes",
".",
"attr",
"[",
"key",
"]",
".",
"values",
".",
"add",
"(",
")",
".",
"string_value",
"=",
"val",
"if",
"featureId",
"in",
"self",
".",
"_locationMap",
":",
"location",
"=",
"self",
".",
"_locationMap",
"[",
"featureId",
"]",
"pbFeature",
".",
"reference_name",
"=",
"location",
"[",
"\"chromosome\"",
"]",
"pbFeature",
".",
"start",
"=",
"location",
"[",
"\"begin\"",
"]",
"pbFeature",
".",
"end",
"=",
"location",
"[",
"\"end\"",
"]",
"return",
"pbFeature"
] | find a feature and return ga4gh representation, use 'native' id as
featureId | [
"find",
"a",
"feature",
"and",
"return",
"ga4gh",
"representation",
"use",
"native",
"id",
"as",
"featureId"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L93-L137 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet._filterSearchFeaturesRequest | def _filterSearchFeaturesRequest(self, reference_name, gene_symbol, name,
start, end):
"""
formulate a sparql query string based on parameters
"""
filters = []
query = self._baseQuery()
filters = []
location = self._findLocation(reference_name, start, end)
if location:
filters.append("?feature = <{}>".format(location))
if gene_symbol:
filters.append('regex(?feature_label, "{}")')
if name:
filters.append(
'regex(?feature_label, "{}")'.format(name))
# apply filters
filter = "FILTER ({})".format(' && '.join(filters))
if len(filters) == 0:
filter = ""
query = query.replace("#%FILTER%", filter)
return query | python | def _filterSearchFeaturesRequest(self, reference_name, gene_symbol, name,
start, end):
"""
formulate a sparql query string based on parameters
"""
filters = []
query = self._baseQuery()
filters = []
location = self._findLocation(reference_name, start, end)
if location:
filters.append("?feature = <{}>".format(location))
if gene_symbol:
filters.append('regex(?feature_label, "{}")')
if name:
filters.append(
'regex(?feature_label, "{}")'.format(name))
# apply filters
filter = "FILTER ({})".format(' && '.join(filters))
if len(filters) == 0:
filter = ""
query = query.replace("#%FILTER%", filter)
return query | [
"def",
"_filterSearchFeaturesRequest",
"(",
"self",
",",
"reference_name",
",",
"gene_symbol",
",",
"name",
",",
"start",
",",
"end",
")",
":",
"filters",
"=",
"[",
"]",
"query",
"=",
"self",
".",
"_baseQuery",
"(",
")",
"filters",
"=",
"[",
"]",
"location",
"=",
"self",
".",
"_findLocation",
"(",
"reference_name",
",",
"start",
",",
"end",
")",
"if",
"location",
":",
"filters",
".",
"append",
"(",
"\"?feature = <{}>\"",
".",
"format",
"(",
"location",
")",
")",
"if",
"gene_symbol",
":",
"filters",
".",
"append",
"(",
"'regex(?feature_label, \"{}\")'",
")",
"if",
"name",
":",
"filters",
".",
"append",
"(",
"'regex(?feature_label, \"{}\")'",
".",
"format",
"(",
"name",
")",
")",
"# apply filters",
"filter",
"=",
"\"FILTER ({})\"",
".",
"format",
"(",
"' && '",
".",
"join",
"(",
"filters",
")",
")",
"if",
"len",
"(",
"filters",
")",
"==",
"0",
":",
"filter",
"=",
"\"\"",
"query",
"=",
"query",
".",
"replace",
"(",
"\"#%FILTER%\"",
",",
"filter",
")",
"return",
"query"
] | formulate a sparql query string based on parameters | [
"formulate",
"a",
"sparql",
"query",
"string",
"based",
"on",
"parameters"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L184-L205 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet._findLocation | def _findLocation(self, reference_name, start, end):
"""
return a location key form the locationMap
"""
try:
# TODO - sequence_annotations does not have build?
return self._locationMap['hg19'][reference_name][start][end]
except:
return None | python | def _findLocation(self, reference_name, start, end):
"""
return a location key form the locationMap
"""
try:
# TODO - sequence_annotations does not have build?
return self._locationMap['hg19'][reference_name][start][end]
except:
return None | [
"def",
"_findLocation",
"(",
"self",
",",
"reference_name",
",",
"start",
",",
"end",
")",
":",
"try",
":",
"# TODO - sequence_annotations does not have build?",
"return",
"self",
".",
"_locationMap",
"[",
"'hg19'",
"]",
"[",
"reference_name",
"]",
"[",
"start",
"]",
"[",
"end",
"]",
"except",
":",
"return",
"None"
] | return a location key form the locationMap | [
"return",
"a",
"location",
"key",
"form",
"the",
"locationMap"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L207-L215 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/genotype_phenotype_featureset.py | PhenotypeAssociationFeatureSet._initializeLocationCache | def _initializeLocationCache(self):
"""
CGD uses Faldo ontology for locations, it's a bit complicated.
This function sets up an in memory cache of all locations, which
can be queried via:
locationMap[build][chromosome][begin][end] = location["_id"]
"""
# cache of locations
self._locationMap = {}
locationMap = self._locationMap
triples = self._rdfGraph.triples
Ref = rdflib.URIRef
associations = []
for subj, _, _ in triples((None, RDF.type, Ref(ASSOCIATION))):
associations.append(subj.toPython())
locationIds = []
for association in associations:
for _, _, obj in triples((Ref(association),
Ref(HAS_SUBJECT), None)):
locationIds.append(obj.toPython())
locations = []
for _id in locationIds:
location = {}
location["_id"] = _id
for subj, predicate, obj in triples((Ref(location["_id"]),
None, None)):
if not predicate.toPython() in location:
location[predicate.toPython()] = []
bisect.insort(location[predicate.toPython()], obj.toPython())
if FALDO_LOCATION in location:
locations.append(location)
for location in locations:
for _id in location[FALDO_LOCATION]:
# lookup faldo region, ensure positions are sorted
faldoLocation = {}
faldoLocation["_id"] = _id
for subj, predicate, obj in triples((Ref(faldoLocation["_id"]),
None, None)):
if not predicate.toPython() in faldoLocation:
faldoLocation[predicate.toPython()] = []
bisect.insort(faldoLocation[predicate.toPython()],
obj.toPython())
faldoBegins = []
for _id in faldoLocation[FALDO_BEGIN]:
faldoBegin = {}
faldoBegin["_id"] = _id
for subj, predicate, obj in triples(
(Ref(faldoBegin["_id"]),
None, None)):
faldoBegin[predicate.toPython()] = obj.toPython()
faldoBegins.append(faldoBegin)
faldoReferences = []
for _id in faldoLocation[FALDO_BEGIN]:
faldoReference = {}
faldoReference["_id"] = faldoBegin[FALDO_REFERENCE]
for subj, predicate, obj in triples(
(Ref(faldoReference["_id"]),
None, None)):
faldoReference[predicate.toPython()] = obj.toPython()
faldoReferences.append(faldoReference)
faldoEnds = []
for _id in faldoLocation[FALDO_END]:
faldoEnd = {}
faldoEnd["_id"] = _id
for subj, predicate, obj in triples((Ref(faldoEnd["_id"]),
None, None)):
faldoEnd[predicate.toPython()] = obj.toPython()
faldoEnds.append(faldoEnd)
for idx, faldoReference in enumerate(faldoReferences):
if MEMBER_OF in faldoReference:
build = faldoReference[MEMBER_OF].split('/')[-1]
chromosome = faldoReference[LABEL].split(' ')[0]
begin = faldoBegins[idx][FALDO_POSITION]
end = faldoEnds[idx][FALDO_POSITION]
if build not in locationMap:
locationMap[build] = {}
if chromosome not in locationMap[build]:
locationMap[build][chromosome] = {}
if begin not in locationMap[build][chromosome]:
locationMap[build][chromosome][begin] = {}
if end not in locationMap[build][chromosome][begin]:
locationMap[build][chromosome][begin][end] = {}
locationMap[build][chromosome][begin][end] = \
location["_id"]
locationMap[location["_id"]] = {
"build": build,
"chromosome": chromosome,
"begin": begin,
"end": end,
} | python | def _initializeLocationCache(self):
"""
CGD uses Faldo ontology for locations, it's a bit complicated.
This function sets up an in memory cache of all locations, which
can be queried via:
locationMap[build][chromosome][begin][end] = location["_id"]
"""
# cache of locations
self._locationMap = {}
locationMap = self._locationMap
triples = self._rdfGraph.triples
Ref = rdflib.URIRef
associations = []
for subj, _, _ in triples((None, RDF.type, Ref(ASSOCIATION))):
associations.append(subj.toPython())
locationIds = []
for association in associations:
for _, _, obj in triples((Ref(association),
Ref(HAS_SUBJECT), None)):
locationIds.append(obj.toPython())
locations = []
for _id in locationIds:
location = {}
location["_id"] = _id
for subj, predicate, obj in triples((Ref(location["_id"]),
None, None)):
if not predicate.toPython() in location:
location[predicate.toPython()] = []
bisect.insort(location[predicate.toPython()], obj.toPython())
if FALDO_LOCATION in location:
locations.append(location)
for location in locations:
for _id in location[FALDO_LOCATION]:
# lookup faldo region, ensure positions are sorted
faldoLocation = {}
faldoLocation["_id"] = _id
for subj, predicate, obj in triples((Ref(faldoLocation["_id"]),
None, None)):
if not predicate.toPython() in faldoLocation:
faldoLocation[predicate.toPython()] = []
bisect.insort(faldoLocation[predicate.toPython()],
obj.toPython())
faldoBegins = []
for _id in faldoLocation[FALDO_BEGIN]:
faldoBegin = {}
faldoBegin["_id"] = _id
for subj, predicate, obj in triples(
(Ref(faldoBegin["_id"]),
None, None)):
faldoBegin[predicate.toPython()] = obj.toPython()
faldoBegins.append(faldoBegin)
faldoReferences = []
for _id in faldoLocation[FALDO_BEGIN]:
faldoReference = {}
faldoReference["_id"] = faldoBegin[FALDO_REFERENCE]
for subj, predicate, obj in triples(
(Ref(faldoReference["_id"]),
None, None)):
faldoReference[predicate.toPython()] = obj.toPython()
faldoReferences.append(faldoReference)
faldoEnds = []
for _id in faldoLocation[FALDO_END]:
faldoEnd = {}
faldoEnd["_id"] = _id
for subj, predicate, obj in triples((Ref(faldoEnd["_id"]),
None, None)):
faldoEnd[predicate.toPython()] = obj.toPython()
faldoEnds.append(faldoEnd)
for idx, faldoReference in enumerate(faldoReferences):
if MEMBER_OF in faldoReference:
build = faldoReference[MEMBER_OF].split('/')[-1]
chromosome = faldoReference[LABEL].split(' ')[0]
begin = faldoBegins[idx][FALDO_POSITION]
end = faldoEnds[idx][FALDO_POSITION]
if build not in locationMap:
locationMap[build] = {}
if chromosome not in locationMap[build]:
locationMap[build][chromosome] = {}
if begin not in locationMap[build][chromosome]:
locationMap[build][chromosome][begin] = {}
if end not in locationMap[build][chromosome][begin]:
locationMap[build][chromosome][begin][end] = {}
locationMap[build][chromosome][begin][end] = \
location["_id"]
locationMap[location["_id"]] = {
"build": build,
"chromosome": chromosome,
"begin": begin,
"end": end,
} | [
"def",
"_initializeLocationCache",
"(",
"self",
")",
":",
"# cache of locations",
"self",
".",
"_locationMap",
"=",
"{",
"}",
"locationMap",
"=",
"self",
".",
"_locationMap",
"triples",
"=",
"self",
".",
"_rdfGraph",
".",
"triples",
"Ref",
"=",
"rdflib",
".",
"URIRef",
"associations",
"=",
"[",
"]",
"for",
"subj",
",",
"_",
",",
"_",
"in",
"triples",
"(",
"(",
"None",
",",
"RDF",
".",
"type",
",",
"Ref",
"(",
"ASSOCIATION",
")",
")",
")",
":",
"associations",
".",
"append",
"(",
"subj",
".",
"toPython",
"(",
")",
")",
"locationIds",
"=",
"[",
"]",
"for",
"association",
"in",
"associations",
":",
"for",
"_",
",",
"_",
",",
"obj",
"in",
"triples",
"(",
"(",
"Ref",
"(",
"association",
")",
",",
"Ref",
"(",
"HAS_SUBJECT",
")",
",",
"None",
")",
")",
":",
"locationIds",
".",
"append",
"(",
"obj",
".",
"toPython",
"(",
")",
")",
"locations",
"=",
"[",
"]",
"for",
"_id",
"in",
"locationIds",
":",
"location",
"=",
"{",
"}",
"location",
"[",
"\"_id\"",
"]",
"=",
"_id",
"for",
"subj",
",",
"predicate",
",",
"obj",
"in",
"triples",
"(",
"(",
"Ref",
"(",
"location",
"[",
"\"_id\"",
"]",
")",
",",
"None",
",",
"None",
")",
")",
":",
"if",
"not",
"predicate",
".",
"toPython",
"(",
")",
"in",
"location",
":",
"location",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
"=",
"[",
"]",
"bisect",
".",
"insort",
"(",
"location",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
",",
"obj",
".",
"toPython",
"(",
")",
")",
"if",
"FALDO_LOCATION",
"in",
"location",
":",
"locations",
".",
"append",
"(",
"location",
")",
"for",
"location",
"in",
"locations",
":",
"for",
"_id",
"in",
"location",
"[",
"FALDO_LOCATION",
"]",
":",
"# lookup faldo region, ensure positions are sorted",
"faldoLocation",
"=",
"{",
"}",
"faldoLocation",
"[",
"\"_id\"",
"]",
"=",
"_id",
"for",
"subj",
",",
"predicate",
",",
"obj",
"in",
"triples",
"(",
"(",
"Ref",
"(",
"faldoLocation",
"[",
"\"_id\"",
"]",
")",
",",
"None",
",",
"None",
")",
")",
":",
"if",
"not",
"predicate",
".",
"toPython",
"(",
")",
"in",
"faldoLocation",
":",
"faldoLocation",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
"=",
"[",
"]",
"bisect",
".",
"insort",
"(",
"faldoLocation",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
",",
"obj",
".",
"toPython",
"(",
")",
")",
"faldoBegins",
"=",
"[",
"]",
"for",
"_id",
"in",
"faldoLocation",
"[",
"FALDO_BEGIN",
"]",
":",
"faldoBegin",
"=",
"{",
"}",
"faldoBegin",
"[",
"\"_id\"",
"]",
"=",
"_id",
"for",
"subj",
",",
"predicate",
",",
"obj",
"in",
"triples",
"(",
"(",
"Ref",
"(",
"faldoBegin",
"[",
"\"_id\"",
"]",
")",
",",
"None",
",",
"None",
")",
")",
":",
"faldoBegin",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
"=",
"obj",
".",
"toPython",
"(",
")",
"faldoBegins",
".",
"append",
"(",
"faldoBegin",
")",
"faldoReferences",
"=",
"[",
"]",
"for",
"_id",
"in",
"faldoLocation",
"[",
"FALDO_BEGIN",
"]",
":",
"faldoReference",
"=",
"{",
"}",
"faldoReference",
"[",
"\"_id\"",
"]",
"=",
"faldoBegin",
"[",
"FALDO_REFERENCE",
"]",
"for",
"subj",
",",
"predicate",
",",
"obj",
"in",
"triples",
"(",
"(",
"Ref",
"(",
"faldoReference",
"[",
"\"_id\"",
"]",
")",
",",
"None",
",",
"None",
")",
")",
":",
"faldoReference",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
"=",
"obj",
".",
"toPython",
"(",
")",
"faldoReferences",
".",
"append",
"(",
"faldoReference",
")",
"faldoEnds",
"=",
"[",
"]",
"for",
"_id",
"in",
"faldoLocation",
"[",
"FALDO_END",
"]",
":",
"faldoEnd",
"=",
"{",
"}",
"faldoEnd",
"[",
"\"_id\"",
"]",
"=",
"_id",
"for",
"subj",
",",
"predicate",
",",
"obj",
"in",
"triples",
"(",
"(",
"Ref",
"(",
"faldoEnd",
"[",
"\"_id\"",
"]",
")",
",",
"None",
",",
"None",
")",
")",
":",
"faldoEnd",
"[",
"predicate",
".",
"toPython",
"(",
")",
"]",
"=",
"obj",
".",
"toPython",
"(",
")",
"faldoEnds",
".",
"append",
"(",
"faldoEnd",
")",
"for",
"idx",
",",
"faldoReference",
"in",
"enumerate",
"(",
"faldoReferences",
")",
":",
"if",
"MEMBER_OF",
"in",
"faldoReference",
":",
"build",
"=",
"faldoReference",
"[",
"MEMBER_OF",
"]",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"chromosome",
"=",
"faldoReference",
"[",
"LABEL",
"]",
".",
"split",
"(",
"' '",
")",
"[",
"0",
"]",
"begin",
"=",
"faldoBegins",
"[",
"idx",
"]",
"[",
"FALDO_POSITION",
"]",
"end",
"=",
"faldoEnds",
"[",
"idx",
"]",
"[",
"FALDO_POSITION",
"]",
"if",
"build",
"not",
"in",
"locationMap",
":",
"locationMap",
"[",
"build",
"]",
"=",
"{",
"}",
"if",
"chromosome",
"not",
"in",
"locationMap",
"[",
"build",
"]",
":",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
"=",
"{",
"}",
"if",
"begin",
"not",
"in",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
":",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
"[",
"begin",
"]",
"=",
"{",
"}",
"if",
"end",
"not",
"in",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
"[",
"begin",
"]",
":",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
"[",
"begin",
"]",
"[",
"end",
"]",
"=",
"{",
"}",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
"[",
"begin",
"]",
"[",
"end",
"]",
"=",
"location",
"[",
"\"_id\"",
"]",
"locationMap",
"[",
"location",
"[",
"\"_id\"",
"]",
"]",
"=",
"{",
"\"build\"",
":",
"build",
",",
"\"chromosome\"",
":",
"chromosome",
",",
"\"begin\"",
":",
"begin",
",",
"\"end\"",
":",
"end",
",",
"}"
] | CGD uses Faldo ontology for locations, it's a bit complicated.
This function sets up an in memory cache of all locations, which
can be queried via:
locationMap[build][chromosome][begin][end] = location["_id"] | [
"CGD",
"uses",
"Faldo",
"ontology",
"for",
"locations",
"it",
"s",
"a",
"bit",
"complicated",
".",
"This",
"function",
"sets",
"up",
"an",
"in",
"memory",
"cache",
"of",
"all",
"locations",
"which",
"can",
"be",
"queried",
"via",
":",
"locationMap",
"[",
"build",
"]",
"[",
"chromosome",
"]",
"[",
"begin",
"]",
"[",
"end",
"]",
"=",
"location",
"[",
"_id",
"]"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L217-L315 | train |
ga4gh/ga4gh-server | ga4gh/server/response_builder.py | SearchResponseBuilder.addValue | def addValue(self, protocolElement):
"""
Appends the specified protocolElement to the value list for this
response.
"""
self._numElements += 1
self._bufferSize += protocolElement.ByteSize()
attr = getattr(self._protoObject, self._valueListName)
obj = attr.add()
obj.CopyFrom(protocolElement) | python | def addValue(self, protocolElement):
"""
Appends the specified protocolElement to the value list for this
response.
"""
self._numElements += 1
self._bufferSize += protocolElement.ByteSize()
attr = getattr(self._protoObject, self._valueListName)
obj = attr.add()
obj.CopyFrom(protocolElement) | [
"def",
"addValue",
"(",
"self",
",",
"protocolElement",
")",
":",
"self",
".",
"_numElements",
"+=",
"1",
"self",
".",
"_bufferSize",
"+=",
"protocolElement",
".",
"ByteSize",
"(",
")",
"attr",
"=",
"getattr",
"(",
"self",
".",
"_protoObject",
",",
"self",
".",
"_valueListName",
")",
"obj",
"=",
"attr",
".",
"add",
"(",
")",
"obj",
".",
"CopyFrom",
"(",
"protocolElement",
")"
] | Appends the specified protocolElement to the value list for this
response. | [
"Appends",
"the",
"specified",
"protocolElement",
"to",
"the",
"value",
"list",
"for",
"this",
"response",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/response_builder.py#L61-L70 | train |
ga4gh/ga4gh-server | ga4gh/server/response_builder.py | SearchResponseBuilder.isFull | def isFull(self):
"""
Returns True if the response buffer is full, and False otherwise.
The buffer is full if either (1) the number of items in the value
list is >= pageSize or (2) the total length of the serialised
elements in the page is >= maxBufferSize.
If page_size or max_response_length were not set in the request
then they're not checked.
"""
return (
(self._pageSize > 0 and self._numElements >= self._pageSize) or
(self._bufferSize >= self._maxBufferSize)
) | python | def isFull(self):
"""
Returns True if the response buffer is full, and False otherwise.
The buffer is full if either (1) the number of items in the value
list is >= pageSize or (2) the total length of the serialised
elements in the page is >= maxBufferSize.
If page_size or max_response_length were not set in the request
then they're not checked.
"""
return (
(self._pageSize > 0 and self._numElements >= self._pageSize) or
(self._bufferSize >= self._maxBufferSize)
) | [
"def",
"isFull",
"(",
"self",
")",
":",
"return",
"(",
"(",
"self",
".",
"_pageSize",
">",
"0",
"and",
"self",
".",
"_numElements",
">=",
"self",
".",
"_pageSize",
")",
"or",
"(",
"self",
".",
"_bufferSize",
">=",
"self",
".",
"_maxBufferSize",
")",
")"
] | Returns True if the response buffer is full, and False otherwise.
The buffer is full if either (1) the number of items in the value
list is >= pageSize or (2) the total length of the serialised
elements in the page is >= maxBufferSize.
If page_size or max_response_length were not set in the request
then they're not checked. | [
"Returns",
"True",
"if",
"the",
"response",
"buffer",
"is",
"full",
"and",
"False",
"otherwise",
".",
"The",
"buffer",
"is",
"full",
"if",
"either",
"(",
"1",
")",
"the",
"number",
"of",
"items",
"in",
"the",
"value",
"list",
"is",
">",
"=",
"pageSize",
"or",
"(",
"2",
")",
"the",
"total",
"length",
"of",
"the",
"serialised",
"elements",
"in",
"the",
"page",
"is",
">",
"=",
"maxBufferSize",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/response_builder.py#L72-L85 | train |
ga4gh/ga4gh-server | ga4gh/server/response_builder.py | SearchResponseBuilder.getSerializedResponse | def getSerializedResponse(self):
"""
Returns a string version of the SearchResponse that has
been built by this SearchResponseBuilder.
"""
self._protoObject.next_page_token = pb.string(self._nextPageToken)
s = protocol.toJson(self._protoObject)
return s | python | def getSerializedResponse(self):
"""
Returns a string version of the SearchResponse that has
been built by this SearchResponseBuilder.
"""
self._protoObject.next_page_token = pb.string(self._nextPageToken)
s = protocol.toJson(self._protoObject)
return s | [
"def",
"getSerializedResponse",
"(",
"self",
")",
":",
"self",
".",
"_protoObject",
".",
"next_page_token",
"=",
"pb",
".",
"string",
"(",
"self",
".",
"_nextPageToken",
")",
"s",
"=",
"protocol",
".",
"toJson",
"(",
"self",
".",
"_protoObject",
")",
"return",
"s"
] | Returns a string version of the SearchResponse that has
been built by this SearchResponseBuilder. | [
"Returns",
"a",
"string",
"version",
"of",
"the",
"SearchResponse",
"that",
"has",
"been",
"built",
"by",
"this",
"SearchResponseBuilder",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/response_builder.py#L87-L94 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/ontologies.py | Ontology.populateFromRow | def populateFromRow(self, ontologyRecord):
"""
Populates this Ontology using values in the specified DB row.
"""
self._id = ontologyRecord.id
self._dataUrl = ontologyRecord.dataurl
self._readFile() | python | def populateFromRow(self, ontologyRecord):
"""
Populates this Ontology using values in the specified DB row.
"""
self._id = ontologyRecord.id
self._dataUrl = ontologyRecord.dataurl
self._readFile() | [
"def",
"populateFromRow",
"(",
"self",
",",
"ontologyRecord",
")",
":",
"self",
".",
"_id",
"=",
"ontologyRecord",
".",
"id",
"self",
".",
"_dataUrl",
"=",
"ontologyRecord",
".",
"dataurl",
"self",
".",
"_readFile",
"(",
")"
] | Populates this Ontology using values in the specified DB row. | [
"Populates",
"this",
"Ontology",
"using",
"values",
"in",
"the",
"specified",
"DB",
"row",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/ontologies.py#L75-L81 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/ontologies.py | Ontology.getGaTermByName | def getGaTermByName(self, name):
"""
Returns a GA4GH OntologyTerm object by name.
:param name: name of the ontology term, ex. "gene".
:return: GA4GH OntologyTerm object.
"""
# TODO what is the correct value when we have no mapping??
termIds = self.getTermIds(name)
if len(termIds) == 0:
termId = ""
# TODO add logging for missed term translation.
else:
# TODO what is the correct behaviour here when we have multiple
# IDs matching a given name?
termId = termIds[0]
term = protocol.OntologyTerm()
term.term = name
term.term_id = termId
return term | python | def getGaTermByName(self, name):
"""
Returns a GA4GH OntologyTerm object by name.
:param name: name of the ontology term, ex. "gene".
:return: GA4GH OntologyTerm object.
"""
# TODO what is the correct value when we have no mapping??
termIds = self.getTermIds(name)
if len(termIds) == 0:
termId = ""
# TODO add logging for missed term translation.
else:
# TODO what is the correct behaviour here when we have multiple
# IDs matching a given name?
termId = termIds[0]
term = protocol.OntologyTerm()
term.term = name
term.term_id = termId
return term | [
"def",
"getGaTermByName",
"(",
"self",
",",
"name",
")",
":",
"# TODO what is the correct value when we have no mapping??",
"termIds",
"=",
"self",
".",
"getTermIds",
"(",
"name",
")",
"if",
"len",
"(",
"termIds",
")",
"==",
"0",
":",
"termId",
"=",
"\"\"",
"# TODO add logging for missed term translation.",
"else",
":",
"# TODO what is the correct behaviour here when we have multiple",
"# IDs matching a given name?",
"termId",
"=",
"termIds",
"[",
"0",
"]",
"term",
"=",
"protocol",
".",
"OntologyTerm",
"(",
")",
"term",
".",
"term",
"=",
"name",
"term",
".",
"term_id",
"=",
"termId",
"return",
"term"
] | Returns a GA4GH OntologyTerm object by name.
:param name: name of the ontology term, ex. "gene".
:return: GA4GH OntologyTerm object. | [
"Returns",
"a",
"GA4GH",
"OntologyTerm",
"object",
"by",
"name",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/ontologies.py#L114-L133 | train |
ga4gh/ga4gh-server | scripts/server_benchmark.py | _heavyQuery | def _heavyQuery(variantSetId, callSetIds):
"""
Very heavy query: calls for the specified list of callSetIds
on chromosome 2 (11 pages, 90 seconds to fetch the entire thing
on a high-end desktop machine)
"""
request = protocol.SearchVariantsRequest()
request.reference_name = '2'
request.variant_set_id = variantSetId
for callSetId in callSetIds:
request.call_set_ids.add(callSetId)
request.page_size = 100
request.end = 100000
return request | python | def _heavyQuery(variantSetId, callSetIds):
"""
Very heavy query: calls for the specified list of callSetIds
on chromosome 2 (11 pages, 90 seconds to fetch the entire thing
on a high-end desktop machine)
"""
request = protocol.SearchVariantsRequest()
request.reference_name = '2'
request.variant_set_id = variantSetId
for callSetId in callSetIds:
request.call_set_ids.add(callSetId)
request.page_size = 100
request.end = 100000
return request | [
"def",
"_heavyQuery",
"(",
"variantSetId",
",",
"callSetIds",
")",
":",
"request",
"=",
"protocol",
".",
"SearchVariantsRequest",
"(",
")",
"request",
".",
"reference_name",
"=",
"'2'",
"request",
".",
"variant_set_id",
"=",
"variantSetId",
"for",
"callSetId",
"in",
"callSetIds",
":",
"request",
".",
"call_set_ids",
".",
"add",
"(",
"callSetId",
")",
"request",
".",
"page_size",
"=",
"100",
"request",
".",
"end",
"=",
"100000",
"return",
"request"
] | Very heavy query: calls for the specified list of callSetIds
on chromosome 2 (11 pages, 90 seconds to fetch the entire thing
on a high-end desktop machine) | [
"Very",
"heavy",
"query",
":",
"calls",
"for",
"the",
"specified",
"list",
"of",
"callSetIds",
"on",
"chromosome",
"2",
"(",
"11",
"pages",
"90",
"seconds",
"to",
"fetch",
"the",
"entire",
"thing",
"on",
"a",
"high",
"-",
"end",
"desktop",
"machine",
")"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/server_benchmark.py#L53-L66 | train |
ga4gh/ga4gh-server | scripts/server_benchmark.py | timeOneSearch | def timeOneSearch(queryString):
"""
Returns (search result as JSON string, time elapsed during search)
"""
startTime = time.clock()
resultString = backend.runSearchVariants(queryString)
endTime = time.clock()
elapsedTime = endTime - startTime
return resultString, elapsedTime | python | def timeOneSearch(queryString):
"""
Returns (search result as JSON string, time elapsed during search)
"""
startTime = time.clock()
resultString = backend.runSearchVariants(queryString)
endTime = time.clock()
elapsedTime = endTime - startTime
return resultString, elapsedTime | [
"def",
"timeOneSearch",
"(",
"queryString",
")",
":",
"startTime",
"=",
"time",
".",
"clock",
"(",
")",
"resultString",
"=",
"backend",
".",
"runSearchVariants",
"(",
"queryString",
")",
"endTime",
"=",
"time",
".",
"clock",
"(",
")",
"elapsedTime",
"=",
"endTime",
"-",
"startTime",
"return",
"resultString",
",",
"elapsedTime"
] | Returns (search result as JSON string, time elapsed during search) | [
"Returns",
"(",
"search",
"result",
"as",
"JSON",
"string",
"time",
"elapsed",
"during",
"search",
")"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/server_benchmark.py#L69-L77 | train |
ga4gh/ga4gh-server | scripts/server_benchmark.py | benchmarkOneQuery | def benchmarkOneQuery(request, repeatLimit=3, pageLimit=3):
"""
Repeat the query several times; perhaps don't go through *all* the
pages. Returns minimum time to run backend.searchVariants() to execute
the query (as far as pageLimit allows), *not* including JSON
processing to prepare queries or parse responses.
"""
times = []
queryString = protocol.toJson(request)
for i in range(0, repeatLimit):
resultString, elapsedTime = timeOneSearch(queryString)
accruedTime = elapsedTime
pageCount = 1
token = extractNextPageToken(resultString)
# Iterate to go beyond the first page of results.
while token is not None and pageCount < pageLimit:
pageRequest = request
pageRequest.page_token = token
pageRequestString = protocol.toJson(pageRequest)
resultString, elapsedTime = timeOneSearch(pageRequestString)
accruedTime += elapsedTime
pageCount = pageCount + 1
token = extractNextPageToken(resultString)
times.append(accruedTime)
# TODO: more sophisticated statistics. Sometimes we want min(),
# sometimes mean = sum() / len(), sometimes other measures,
# perhaps exclude outliers...
# If we compute average we should throw out at least the first one.
# return sum(times[2:])/len(times[2:])
return min(times) | python | def benchmarkOneQuery(request, repeatLimit=3, pageLimit=3):
"""
Repeat the query several times; perhaps don't go through *all* the
pages. Returns minimum time to run backend.searchVariants() to execute
the query (as far as pageLimit allows), *not* including JSON
processing to prepare queries or parse responses.
"""
times = []
queryString = protocol.toJson(request)
for i in range(0, repeatLimit):
resultString, elapsedTime = timeOneSearch(queryString)
accruedTime = elapsedTime
pageCount = 1
token = extractNextPageToken(resultString)
# Iterate to go beyond the first page of results.
while token is not None and pageCount < pageLimit:
pageRequest = request
pageRequest.page_token = token
pageRequestString = protocol.toJson(pageRequest)
resultString, elapsedTime = timeOneSearch(pageRequestString)
accruedTime += elapsedTime
pageCount = pageCount + 1
token = extractNextPageToken(resultString)
times.append(accruedTime)
# TODO: more sophisticated statistics. Sometimes we want min(),
# sometimes mean = sum() / len(), sometimes other measures,
# perhaps exclude outliers...
# If we compute average we should throw out at least the first one.
# return sum(times[2:])/len(times[2:])
return min(times) | [
"def",
"benchmarkOneQuery",
"(",
"request",
",",
"repeatLimit",
"=",
"3",
",",
"pageLimit",
"=",
"3",
")",
":",
"times",
"=",
"[",
"]",
"queryString",
"=",
"protocol",
".",
"toJson",
"(",
"request",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"repeatLimit",
")",
":",
"resultString",
",",
"elapsedTime",
"=",
"timeOneSearch",
"(",
"queryString",
")",
"accruedTime",
"=",
"elapsedTime",
"pageCount",
"=",
"1",
"token",
"=",
"extractNextPageToken",
"(",
"resultString",
")",
"# Iterate to go beyond the first page of results.",
"while",
"token",
"is",
"not",
"None",
"and",
"pageCount",
"<",
"pageLimit",
":",
"pageRequest",
"=",
"request",
"pageRequest",
".",
"page_token",
"=",
"token",
"pageRequestString",
"=",
"protocol",
".",
"toJson",
"(",
"pageRequest",
")",
"resultString",
",",
"elapsedTime",
"=",
"timeOneSearch",
"(",
"pageRequestString",
")",
"accruedTime",
"+=",
"elapsedTime",
"pageCount",
"=",
"pageCount",
"+",
"1",
"token",
"=",
"extractNextPageToken",
"(",
"resultString",
")",
"times",
".",
"append",
"(",
"accruedTime",
")",
"# TODO: more sophisticated statistics. Sometimes we want min(),",
"# sometimes mean = sum() / len(), sometimes other measures,",
"# perhaps exclude outliers...",
"# If we compute average we should throw out at least the first one.",
"# return sum(times[2:])/len(times[2:])",
"return",
"min",
"(",
"times",
")"
] | Repeat the query several times; perhaps don't go through *all* the
pages. Returns minimum time to run backend.searchVariants() to execute
the query (as far as pageLimit allows), *not* including JSON
processing to prepare queries or parse responses. | [
"Repeat",
"the",
"query",
"several",
"times",
";",
"perhaps",
"don",
"t",
"go",
"through",
"*",
"all",
"*",
"the",
"pages",
".",
"Returns",
"minimum",
"time",
"to",
"run",
"backend",
".",
"searchVariants",
"()",
"to",
"execute",
"the",
"query",
"(",
"as",
"far",
"as",
"pageLimit",
"allows",
")",
"*",
"not",
"*",
"including",
"JSON",
"processing",
"to",
"prepare",
"queries",
"or",
"parse",
"responses",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/server_benchmark.py#L93-L124 | train |
ga4gh/ga4gh-server | ga4gh/server/exceptions.py | getExceptionClass | def getExceptionClass(errorCode):
"""
Converts the specified error code into the corresponding class object.
Raises a KeyError if the errorCode is not found.
"""
classMap = {}
for name, class_ in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(class_) and issubclass(class_, BaseServerException):
classMap[class_.getErrorCode()] = class_
return classMap[errorCode] | python | def getExceptionClass(errorCode):
"""
Converts the specified error code into the corresponding class object.
Raises a KeyError if the errorCode is not found.
"""
classMap = {}
for name, class_ in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(class_) and issubclass(class_, BaseServerException):
classMap[class_.getErrorCode()] = class_
return classMap[errorCode] | [
"def",
"getExceptionClass",
"(",
"errorCode",
")",
":",
"classMap",
"=",
"{",
"}",
"for",
"name",
",",
"class_",
"in",
"inspect",
".",
"getmembers",
"(",
"sys",
".",
"modules",
"[",
"__name__",
"]",
")",
":",
"if",
"inspect",
".",
"isclass",
"(",
"class_",
")",
"and",
"issubclass",
"(",
"class_",
",",
"BaseServerException",
")",
":",
"classMap",
"[",
"class_",
".",
"getErrorCode",
"(",
")",
"]",
"=",
"class_",
"return",
"classMap",
"[",
"errorCode",
"]"
] | Converts the specified error code into the corresponding class object.
Raises a KeyError if the errorCode is not found. | [
"Converts",
"the",
"specified",
"error",
"code",
"into",
"the",
"corresponding",
"class",
"object",
".",
"Raises",
"a",
"KeyError",
"if",
"the",
"errorCode",
"is",
"not",
"found",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/exceptions.py#L17-L26 | train |
ga4gh/ga4gh-server | ga4gh/server/exceptions.py | RuntimeException.toProtocolElement | def toProtocolElement(self):
"""
Converts this exception into the GA4GH protocol type so that
it can be communicated back to the client.
"""
error = protocol.GAException()
error.error_code = self.getErrorCode()
error.message = self.getMessage()
return error | python | def toProtocolElement(self):
"""
Converts this exception into the GA4GH protocol type so that
it can be communicated back to the client.
"""
error = protocol.GAException()
error.error_code = self.getErrorCode()
error.message = self.getMessage()
return error | [
"def",
"toProtocolElement",
"(",
"self",
")",
":",
"error",
"=",
"protocol",
".",
"GAException",
"(",
")",
"error",
".",
"error_code",
"=",
"self",
".",
"getErrorCode",
"(",
")",
"error",
".",
"message",
"=",
"self",
".",
"getMessage",
"(",
")",
"return",
"error"
] | Converts this exception into the GA4GH protocol type so that
it can be communicated back to the client. | [
"Converts",
"this",
"exception",
"into",
"the",
"GA4GH",
"protocol",
"type",
"so",
"that",
"it",
"can",
"be",
"communicated",
"back",
"to",
"the",
"client",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/exceptions.py#L94-L102 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._init_goterm_ref | def _init_goterm_ref(self, rec_curr, name, lnum):
"""Initialize new reference and perform checks."""
if rec_curr is None:
return GOTerm()
msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name)
self._die(msg, lnum) | python | def _init_goterm_ref(self, rec_curr, name, lnum):
"""Initialize new reference and perform checks."""
if rec_curr is None:
return GOTerm()
msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name)
self._die(msg, lnum) | [
"def",
"_init_goterm_ref",
"(",
"self",
",",
"rec_curr",
",",
"name",
",",
"lnum",
")",
":",
"if",
"rec_curr",
"is",
"None",
":",
"return",
"GOTerm",
"(",
")",
"msg",
"=",
"\"PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED\"",
".",
"format",
"(",
"REC",
"=",
"name",
")",
"self",
".",
"_die",
"(",
"msg",
",",
"lnum",
")"
] | Initialize new reference and perform checks. | [
"Initialize",
"new",
"reference",
"and",
"perform",
"checks",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L116-L121 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._init_typedef | def _init_typedef(self, typedef_curr, name, lnum):
"""Initialize new typedef and perform checks."""
if typedef_curr is None:
return TypeDef()
msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name)
self._die(msg, lnum) | python | def _init_typedef(self, typedef_curr, name, lnum):
"""Initialize new typedef and perform checks."""
if typedef_curr is None:
return TypeDef()
msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name)
self._die(msg, lnum) | [
"def",
"_init_typedef",
"(",
"self",
",",
"typedef_curr",
",",
"name",
",",
"lnum",
")",
":",
"if",
"typedef_curr",
"is",
"None",
":",
"return",
"TypeDef",
"(",
")",
"msg",
"=",
"\"PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED\"",
".",
"format",
"(",
"REC",
"=",
"name",
")",
"self",
".",
"_die",
"(",
"msg",
",",
"lnum",
")"
] | Initialize new typedef and perform checks. | [
"Initialize",
"new",
"typedef",
"and",
"perform",
"checks",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L123-L128 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._add_to_ref | def _add_to_ref(self, rec_curr, line, lnum):
"""Add new fields to the current reference."""
# Written by DV Klopfenstein
# Examples of record lines containing ':' include:
# id: GO:0000002
# name: mitochondrial genome maintenance
# namespace: biological_process
# def: "The maintenance of ...
# is_a: GO:0007005 ! mitochondrion organization
mtch = re.match(r'^(\S+):\s*(\S.*)$', line)
if mtch:
field_name = mtch.group(1)
field_value = mtch.group(2)
if field_name == "id":
self._chk_none(rec_curr.id, lnum)
rec_curr.id = field_value
elif field_name == "alt_id":
rec_curr.alt_ids.append(field_value)
elif field_name == "name":
self._chk_none(rec_curr.name, lnum)
rec_curr.name = field_value
elif field_name == "namespace":
self._chk_none(rec_curr.namespace, lnum)
rec_curr.namespace = field_value
elif field_name == "is_a":
rec_curr._parents.append(field_value.split()[0])
elif field_name == "is_obsolete" and field_value == "true":
rec_curr.is_obsolete = True
elif field_name in self.optional_attrs:
self.update_rec(rec_curr, field_name, field_value)
else:
self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum) | python | def _add_to_ref(self, rec_curr, line, lnum):
"""Add new fields to the current reference."""
# Written by DV Klopfenstein
# Examples of record lines containing ':' include:
# id: GO:0000002
# name: mitochondrial genome maintenance
# namespace: biological_process
# def: "The maintenance of ...
# is_a: GO:0007005 ! mitochondrion organization
mtch = re.match(r'^(\S+):\s*(\S.*)$', line)
if mtch:
field_name = mtch.group(1)
field_value = mtch.group(2)
if field_name == "id":
self._chk_none(rec_curr.id, lnum)
rec_curr.id = field_value
elif field_name == "alt_id":
rec_curr.alt_ids.append(field_value)
elif field_name == "name":
self._chk_none(rec_curr.name, lnum)
rec_curr.name = field_value
elif field_name == "namespace":
self._chk_none(rec_curr.namespace, lnum)
rec_curr.namespace = field_value
elif field_name == "is_a":
rec_curr._parents.append(field_value.split()[0])
elif field_name == "is_obsolete" and field_value == "true":
rec_curr.is_obsolete = True
elif field_name in self.optional_attrs:
self.update_rec(rec_curr, field_name, field_value)
else:
self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum) | [
"def",
"_add_to_ref",
"(",
"self",
",",
"rec_curr",
",",
"line",
",",
"lnum",
")",
":",
"# Written by DV Klopfenstein",
"# Examples of record lines containing ':' include:",
"# id: GO:0000002",
"# name: mitochondrial genome maintenance",
"# namespace: biological_process",
"# def: \"The maintenance of ...",
"# is_a: GO:0007005 ! mitochondrion organization",
"mtch",
"=",
"re",
".",
"match",
"(",
"r'^(\\S+):\\s*(\\S.*)$'",
",",
"line",
")",
"if",
"mtch",
":",
"field_name",
"=",
"mtch",
".",
"group",
"(",
"1",
")",
"field_value",
"=",
"mtch",
".",
"group",
"(",
"2",
")",
"if",
"field_name",
"==",
"\"id\"",
":",
"self",
".",
"_chk_none",
"(",
"rec_curr",
".",
"id",
",",
"lnum",
")",
"rec_curr",
".",
"id",
"=",
"field_value",
"elif",
"field_name",
"==",
"\"alt_id\"",
":",
"rec_curr",
".",
"alt_ids",
".",
"append",
"(",
"field_value",
")",
"elif",
"field_name",
"==",
"\"name\"",
":",
"self",
".",
"_chk_none",
"(",
"rec_curr",
".",
"name",
",",
"lnum",
")",
"rec_curr",
".",
"name",
"=",
"field_value",
"elif",
"field_name",
"==",
"\"namespace\"",
":",
"self",
".",
"_chk_none",
"(",
"rec_curr",
".",
"namespace",
",",
"lnum",
")",
"rec_curr",
".",
"namespace",
"=",
"field_value",
"elif",
"field_name",
"==",
"\"is_a\"",
":",
"rec_curr",
".",
"_parents",
".",
"append",
"(",
"field_value",
".",
"split",
"(",
")",
"[",
"0",
"]",
")",
"elif",
"field_name",
"==",
"\"is_obsolete\"",
"and",
"field_value",
"==",
"\"true\"",
":",
"rec_curr",
".",
"is_obsolete",
"=",
"True",
"elif",
"field_name",
"in",
"self",
".",
"optional_attrs",
":",
"self",
".",
"update_rec",
"(",
"rec_curr",
",",
"field_name",
",",
"field_value",
")",
"else",
":",
"self",
".",
"_die",
"(",
"\"UNEXPECTED FIELD CONTENT: {L}\\n\"",
".",
"format",
"(",
"L",
"=",
"line",
")",
",",
"lnum",
")"
] | Add new fields to the current reference. | [
"Add",
"new",
"fields",
"to",
"the",
"current",
"reference",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L130-L161 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader.update_rec | def update_rec(self, rec, name, value):
"""Update current GOTerm with optional record."""
# 'def' is a reserved word in python, do not use it as a Class attr.
if name == "def":
name = "defn"
# If we have a relationship, then we will split this into a further
# dictionary.
if hasattr(rec, name):
if name not in self.attrs_scalar:
if name not in self.attrs_nested:
getattr(rec, name).add(value)
else:
self._add_nested(rec, name, value)
else:
raise Exception("ATTR({NAME}) ALREADY SET({VAL})".format(
NAME=name, VAL=getattr(rec, name)))
else: # Initialize new GOTerm attr
if name in self.attrs_scalar:
setattr(rec, name, value)
elif name not in self.attrs_nested:
setattr(rec, name, set([value]))
else:
name = '_{:s}'.format(name)
setattr(rec, name, defaultdict(list))
self._add_nested(rec, name, value) | python | def update_rec(self, rec, name, value):
"""Update current GOTerm with optional record."""
# 'def' is a reserved word in python, do not use it as a Class attr.
if name == "def":
name = "defn"
# If we have a relationship, then we will split this into a further
# dictionary.
if hasattr(rec, name):
if name not in self.attrs_scalar:
if name not in self.attrs_nested:
getattr(rec, name).add(value)
else:
self._add_nested(rec, name, value)
else:
raise Exception("ATTR({NAME}) ALREADY SET({VAL})".format(
NAME=name, VAL=getattr(rec, name)))
else: # Initialize new GOTerm attr
if name in self.attrs_scalar:
setattr(rec, name, value)
elif name not in self.attrs_nested:
setattr(rec, name, set([value]))
else:
name = '_{:s}'.format(name)
setattr(rec, name, defaultdict(list))
self._add_nested(rec, name, value) | [
"def",
"update_rec",
"(",
"self",
",",
"rec",
",",
"name",
",",
"value",
")",
":",
"# 'def' is a reserved word in python, do not use it as a Class attr.",
"if",
"name",
"==",
"\"def\"",
":",
"name",
"=",
"\"defn\"",
"# If we have a relationship, then we will split this into a further",
"# dictionary.",
"if",
"hasattr",
"(",
"rec",
",",
"name",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"attrs_scalar",
":",
"if",
"name",
"not",
"in",
"self",
".",
"attrs_nested",
":",
"getattr",
"(",
"rec",
",",
"name",
")",
".",
"add",
"(",
"value",
")",
"else",
":",
"self",
".",
"_add_nested",
"(",
"rec",
",",
"name",
",",
"value",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"ATTR({NAME}) ALREADY SET({VAL})\"",
".",
"format",
"(",
"NAME",
"=",
"name",
",",
"VAL",
"=",
"getattr",
"(",
"rec",
",",
"name",
")",
")",
")",
"else",
":",
"# Initialize new GOTerm attr",
"if",
"name",
"in",
"self",
".",
"attrs_scalar",
":",
"setattr",
"(",
"rec",
",",
"name",
",",
"value",
")",
"elif",
"name",
"not",
"in",
"self",
".",
"attrs_nested",
":",
"setattr",
"(",
"rec",
",",
"name",
",",
"set",
"(",
"[",
"value",
"]",
")",
")",
"else",
":",
"name",
"=",
"'_{:s}'",
".",
"format",
"(",
"name",
")",
"setattr",
"(",
"rec",
",",
"name",
",",
"defaultdict",
"(",
"list",
")",
")",
"self",
".",
"_add_nested",
"(",
"rec",
",",
"name",
",",
"value",
")"
] | Update current GOTerm with optional record. | [
"Update",
"current",
"GOTerm",
"with",
"optional",
"record",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L163-L189 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._add_to_typedef | def _add_to_typedef(self, typedef_curr, line, lnum):
"""Add new fields to the current typedef."""
mtch = re.match(r'^(\S+):\s*(\S.*)$', line)
if mtch:
field_name = mtch.group(1)
field_value = mtch.group(2).split('!')[0].rstrip()
if field_name == "id":
self._chk_none(typedef_curr.id, lnum)
typedef_curr.id = field_value
elif field_name == "name":
self._chk_none(typedef_curr.name, lnum)
typedef_curr.name = field_value
elif field_name == "transitive_over":
typedef_curr.transitive_over.append(field_value)
elif field_name == "inverse_of":
self._chk_none(typedef_curr.inverse_of, lnum)
typedef_curr.inverse_of = field_value
# Note: there are other tags that aren't imported here.
else:
self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum) | python | def _add_to_typedef(self, typedef_curr, line, lnum):
"""Add new fields to the current typedef."""
mtch = re.match(r'^(\S+):\s*(\S.*)$', line)
if mtch:
field_name = mtch.group(1)
field_value = mtch.group(2).split('!')[0].rstrip()
if field_name == "id":
self._chk_none(typedef_curr.id, lnum)
typedef_curr.id = field_value
elif field_name == "name":
self._chk_none(typedef_curr.name, lnum)
typedef_curr.name = field_value
elif field_name == "transitive_over":
typedef_curr.transitive_over.append(field_value)
elif field_name == "inverse_of":
self._chk_none(typedef_curr.inverse_of, lnum)
typedef_curr.inverse_of = field_value
# Note: there are other tags that aren't imported here.
else:
self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum) | [
"def",
"_add_to_typedef",
"(",
"self",
",",
"typedef_curr",
",",
"line",
",",
"lnum",
")",
":",
"mtch",
"=",
"re",
".",
"match",
"(",
"r'^(\\S+):\\s*(\\S.*)$'",
",",
"line",
")",
"if",
"mtch",
":",
"field_name",
"=",
"mtch",
".",
"group",
"(",
"1",
")",
"field_value",
"=",
"mtch",
".",
"group",
"(",
"2",
")",
".",
"split",
"(",
"'!'",
")",
"[",
"0",
"]",
".",
"rstrip",
"(",
")",
"if",
"field_name",
"==",
"\"id\"",
":",
"self",
".",
"_chk_none",
"(",
"typedef_curr",
".",
"id",
",",
"lnum",
")",
"typedef_curr",
".",
"id",
"=",
"field_value",
"elif",
"field_name",
"==",
"\"name\"",
":",
"self",
".",
"_chk_none",
"(",
"typedef_curr",
".",
"name",
",",
"lnum",
")",
"typedef_curr",
".",
"name",
"=",
"field_value",
"elif",
"field_name",
"==",
"\"transitive_over\"",
":",
"typedef_curr",
".",
"transitive_over",
".",
"append",
"(",
"field_value",
")",
"elif",
"field_name",
"==",
"\"inverse_of\"",
":",
"self",
".",
"_chk_none",
"(",
"typedef_curr",
".",
"inverse_of",
",",
"lnum",
")",
"typedef_curr",
".",
"inverse_of",
"=",
"field_value",
"# Note: there are other tags that aren't imported here.",
"else",
":",
"self",
".",
"_die",
"(",
"\"UNEXPECTED FIELD CONTENT: {L}\\n\"",
".",
"format",
"(",
"L",
"=",
"line",
")",
",",
"lnum",
")"
] | Add new fields to the current typedef. | [
"Add",
"new",
"fields",
"to",
"the",
"current",
"typedef",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L191-L211 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._add_nested | def _add_nested(self, rec, name, value):
"""Adds a term's nested attributes."""
# Remove comments and split term into typedef / target term.
(typedef, target_term) = value.split('!')[0].rstrip().split(' ')
# Save the nested term.
getattr(rec, name)[typedef].append(target_term) | python | def _add_nested(self, rec, name, value):
"""Adds a term's nested attributes."""
# Remove comments and split term into typedef / target term.
(typedef, target_term) = value.split('!')[0].rstrip().split(' ')
# Save the nested term.
getattr(rec, name)[typedef].append(target_term) | [
"def",
"_add_nested",
"(",
"self",
",",
"rec",
",",
"name",
",",
"value",
")",
":",
"# Remove comments and split term into typedef / target term.",
"(",
"typedef",
",",
"target_term",
")",
"=",
"value",
".",
"split",
"(",
"'!'",
")",
"[",
"0",
"]",
".",
"rstrip",
"(",
")",
".",
"split",
"(",
"' '",
")",
"# Save the nested term.",
"getattr",
"(",
"rec",
",",
"name",
")",
"[",
"typedef",
"]",
".",
"append",
"(",
"target_term",
")"
] | Adds a term's nested attributes. | [
"Adds",
"a",
"term",
"s",
"nested",
"attributes",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L213-L219 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._init_optional_attrs | def _init_optional_attrs(self, optional_attrs):
"""Prepare to store data from user-desired optional fields.
Not loading these optional fields by default saves in space and speed.
But allow the possibility for saving these fields, if the user desires,
Including:
comment consider def is_class_level is_metadata_tag is_transitive
relationship replaced_by subset synonym transitive_over xref
"""
# Written by DV Klopfenstein
# Required attributes are always loaded. All others are optionally loaded.
self.attrs_req = ['id', 'alt_id', 'name', 'namespace', 'is_a', 'is_obsolete']
self.attrs_scalar = ['comment', 'defn',
'is_class_level', 'is_metadata_tag',
'is_transitive', 'transitive_over']
self.attrs_nested = frozenset(['relationship'])
# Allow user to specify either: 'def' or 'defn'
# 'def' is an obo field name, but 'defn' is legal Python attribute name
fnc = lambda aopt: aopt if aopt != "defn" else "def"
if optional_attrs is None:
optional_attrs = []
elif isinstance(optional_attrs, str):
optional_attrs = [fnc(optional_attrs)] if optional_attrs not in self.attrs_req else []
elif isinstance(optional_attrs, list) or isinstance(optional_attrs, set):
optional_attrs = set([fnc(f) for f in optional_attrs if f not in self.attrs_req])
else:
raise Exception("optional_attrs arg MUST BE A str, list, or set.")
self.optional_attrs = optional_attrs | python | def _init_optional_attrs(self, optional_attrs):
"""Prepare to store data from user-desired optional fields.
Not loading these optional fields by default saves in space and speed.
But allow the possibility for saving these fields, if the user desires,
Including:
comment consider def is_class_level is_metadata_tag is_transitive
relationship replaced_by subset synonym transitive_over xref
"""
# Written by DV Klopfenstein
# Required attributes are always loaded. All others are optionally loaded.
self.attrs_req = ['id', 'alt_id', 'name', 'namespace', 'is_a', 'is_obsolete']
self.attrs_scalar = ['comment', 'defn',
'is_class_level', 'is_metadata_tag',
'is_transitive', 'transitive_over']
self.attrs_nested = frozenset(['relationship'])
# Allow user to specify either: 'def' or 'defn'
# 'def' is an obo field name, but 'defn' is legal Python attribute name
fnc = lambda aopt: aopt if aopt != "defn" else "def"
if optional_attrs is None:
optional_attrs = []
elif isinstance(optional_attrs, str):
optional_attrs = [fnc(optional_attrs)] if optional_attrs not in self.attrs_req else []
elif isinstance(optional_attrs, list) or isinstance(optional_attrs, set):
optional_attrs = set([fnc(f) for f in optional_attrs if f not in self.attrs_req])
else:
raise Exception("optional_attrs arg MUST BE A str, list, or set.")
self.optional_attrs = optional_attrs | [
"def",
"_init_optional_attrs",
"(",
"self",
",",
"optional_attrs",
")",
":",
"# Written by DV Klopfenstein",
"# Required attributes are always loaded. All others are optionally loaded.",
"self",
".",
"attrs_req",
"=",
"[",
"'id'",
",",
"'alt_id'",
",",
"'name'",
",",
"'namespace'",
",",
"'is_a'",
",",
"'is_obsolete'",
"]",
"self",
".",
"attrs_scalar",
"=",
"[",
"'comment'",
",",
"'defn'",
",",
"'is_class_level'",
",",
"'is_metadata_tag'",
",",
"'is_transitive'",
",",
"'transitive_over'",
"]",
"self",
".",
"attrs_nested",
"=",
"frozenset",
"(",
"[",
"'relationship'",
"]",
")",
"# Allow user to specify either: 'def' or 'defn'",
"# 'def' is an obo field name, but 'defn' is legal Python attribute name",
"fnc",
"=",
"lambda",
"aopt",
":",
"aopt",
"if",
"aopt",
"!=",
"\"defn\"",
"else",
"\"def\"",
"if",
"optional_attrs",
"is",
"None",
":",
"optional_attrs",
"=",
"[",
"]",
"elif",
"isinstance",
"(",
"optional_attrs",
",",
"str",
")",
":",
"optional_attrs",
"=",
"[",
"fnc",
"(",
"optional_attrs",
")",
"]",
"if",
"optional_attrs",
"not",
"in",
"self",
".",
"attrs_req",
"else",
"[",
"]",
"elif",
"isinstance",
"(",
"optional_attrs",
",",
"list",
")",
"or",
"isinstance",
"(",
"optional_attrs",
",",
"set",
")",
":",
"optional_attrs",
"=",
"set",
"(",
"[",
"fnc",
"(",
"f",
")",
"for",
"f",
"in",
"optional_attrs",
"if",
"f",
"not",
"in",
"self",
".",
"attrs_req",
"]",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"optional_attrs arg MUST BE A str, list, or set.\"",
")",
"self",
".",
"optional_attrs",
"=",
"optional_attrs"
] | Prepare to store data from user-desired optional fields.
Not loading these optional fields by default saves in space and speed.
But allow the possibility for saving these fields, if the user desires,
Including:
comment consider def is_class_level is_metadata_tag is_transitive
relationship replaced_by subset synonym transitive_over xref | [
"Prepare",
"to",
"store",
"data",
"from",
"user",
"-",
"desired",
"optional",
"fields",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L221-L248 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | OBOReader._die | def _die(self, msg, lnum):
"""Raise an Exception if file read is unexpected."""
raise Exception("**FATAL {FILE}({LNUM}): {MSG}\n".format(
FILE=self.obo_file, LNUM=lnum, MSG=msg)) | python | def _die(self, msg, lnum):
"""Raise an Exception if file read is unexpected."""
raise Exception("**FATAL {FILE}({LNUM}): {MSG}\n".format(
FILE=self.obo_file, LNUM=lnum, MSG=msg)) | [
"def",
"_die",
"(",
"self",
",",
"msg",
",",
"lnum",
")",
":",
"raise",
"Exception",
"(",
"\"**FATAL {FILE}({LNUM}): {MSG}\\n\"",
".",
"format",
"(",
"FILE",
"=",
"self",
".",
"obo_file",
",",
"LNUM",
"=",
"lnum",
",",
"MSG",
"=",
"msg",
")",
")"
] | Raise an Exception if file read is unexpected. | [
"Raise",
"an",
"Exception",
"if",
"file",
"read",
"is",
"unexpected",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L251-L254 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | GOTerm.write_hier_rec | def write_hier_rec(self, gos_printed, out=sys.stdout,
len_dash=1, max_depth=None, num_child=None, short_prt=False,
include_only=None, go_marks=None,
depth=1, dp="-"):
"""Write hierarchy for a GO Term record."""
# Added by DV Klopfenstein
GO_id = self.id
# Shortens hierarchy report by only printing the hierarchy
# for the sub-set of user-specified GO terms which are connected.
if include_only is not None and GO_id not in include_only:
return
nrp = short_prt and GO_id in gos_printed
if go_marks is not None:
out.write('{} '.format('>' if GO_id in go_marks else ' '))
if len_dash is not None:
# Default character indicating hierarchy level is '-'.
# '=' is used to indicate a hierarchical path printed in detail previously.
letter = '-' if not nrp or not self.children else '='
dp = ''.join([letter]*depth)
out.write('{DASHES:{N}} '.format(DASHES=dp, N=len_dash))
if num_child is not None:
out.write('{N:>5} '.format(N=len(self.get_all_children())))
out.write('{GO}\tL-{L:>02}\tD-{D:>02}\t{desc}\n'.format(
GO=self.id, L=self.level, D=self.depth, desc=self.name))
# Track GOs previously printed only if needed
if short_prt:
gos_printed.add(GO_id)
# Do not print hierarchy below this turn if it has already been printed
if nrp:
return
depth += 1
if max_depth is not None and depth > max_depth:
return
for p in self.children:
p.write_hier_rec(gos_printed, out, len_dash, max_depth, num_child, short_prt,
include_only, go_marks,
depth, dp) | python | def write_hier_rec(self, gos_printed, out=sys.stdout,
len_dash=1, max_depth=None, num_child=None, short_prt=False,
include_only=None, go_marks=None,
depth=1, dp="-"):
"""Write hierarchy for a GO Term record."""
# Added by DV Klopfenstein
GO_id = self.id
# Shortens hierarchy report by only printing the hierarchy
# for the sub-set of user-specified GO terms which are connected.
if include_only is not None and GO_id not in include_only:
return
nrp = short_prt and GO_id in gos_printed
if go_marks is not None:
out.write('{} '.format('>' if GO_id in go_marks else ' '))
if len_dash is not None:
# Default character indicating hierarchy level is '-'.
# '=' is used to indicate a hierarchical path printed in detail previously.
letter = '-' if not nrp or not self.children else '='
dp = ''.join([letter]*depth)
out.write('{DASHES:{N}} '.format(DASHES=dp, N=len_dash))
if num_child is not None:
out.write('{N:>5} '.format(N=len(self.get_all_children())))
out.write('{GO}\tL-{L:>02}\tD-{D:>02}\t{desc}\n'.format(
GO=self.id, L=self.level, D=self.depth, desc=self.name))
# Track GOs previously printed only if needed
if short_prt:
gos_printed.add(GO_id)
# Do not print hierarchy below this turn if it has already been printed
if nrp:
return
depth += 1
if max_depth is not None and depth > max_depth:
return
for p in self.children:
p.write_hier_rec(gos_printed, out, len_dash, max_depth, num_child, short_prt,
include_only, go_marks,
depth, dp) | [
"def",
"write_hier_rec",
"(",
"self",
",",
"gos_printed",
",",
"out",
"=",
"sys",
".",
"stdout",
",",
"len_dash",
"=",
"1",
",",
"max_depth",
"=",
"None",
",",
"num_child",
"=",
"None",
",",
"short_prt",
"=",
"False",
",",
"include_only",
"=",
"None",
",",
"go_marks",
"=",
"None",
",",
"depth",
"=",
"1",
",",
"dp",
"=",
"\"-\"",
")",
":",
"# Added by DV Klopfenstein",
"GO_id",
"=",
"self",
".",
"id",
"# Shortens hierarchy report by only printing the hierarchy",
"# for the sub-set of user-specified GO terms which are connected.",
"if",
"include_only",
"is",
"not",
"None",
"and",
"GO_id",
"not",
"in",
"include_only",
":",
"return",
"nrp",
"=",
"short_prt",
"and",
"GO_id",
"in",
"gos_printed",
"if",
"go_marks",
"is",
"not",
"None",
":",
"out",
".",
"write",
"(",
"'{} '",
".",
"format",
"(",
"'>'",
"if",
"GO_id",
"in",
"go_marks",
"else",
"' '",
")",
")",
"if",
"len_dash",
"is",
"not",
"None",
":",
"# Default character indicating hierarchy level is '-'.",
"# '=' is used to indicate a hierarchical path printed in detail previously.",
"letter",
"=",
"'-'",
"if",
"not",
"nrp",
"or",
"not",
"self",
".",
"children",
"else",
"'='",
"dp",
"=",
"''",
".",
"join",
"(",
"[",
"letter",
"]",
"*",
"depth",
")",
"out",
".",
"write",
"(",
"'{DASHES:{N}} '",
".",
"format",
"(",
"DASHES",
"=",
"dp",
",",
"N",
"=",
"len_dash",
")",
")",
"if",
"num_child",
"is",
"not",
"None",
":",
"out",
".",
"write",
"(",
"'{N:>5} '",
".",
"format",
"(",
"N",
"=",
"len",
"(",
"self",
".",
"get_all_children",
"(",
")",
")",
")",
")",
"out",
".",
"write",
"(",
"'{GO}\\tL-{L:>02}\\tD-{D:>02}\\t{desc}\\n'",
".",
"format",
"(",
"GO",
"=",
"self",
".",
"id",
",",
"L",
"=",
"self",
".",
"level",
",",
"D",
"=",
"self",
".",
"depth",
",",
"desc",
"=",
"self",
".",
"name",
")",
")",
"# Track GOs previously printed only if needed",
"if",
"short_prt",
":",
"gos_printed",
".",
"add",
"(",
"GO_id",
")",
"# Do not print hierarchy below this turn if it has already been printed",
"if",
"nrp",
":",
"return",
"depth",
"+=",
"1",
"if",
"max_depth",
"is",
"not",
"None",
"and",
"depth",
">",
"max_depth",
":",
"return",
"for",
"p",
"in",
"self",
".",
"children",
":",
"p",
".",
"write_hier_rec",
"(",
"gos_printed",
",",
"out",
",",
"len_dash",
",",
"max_depth",
",",
"num_child",
",",
"short_prt",
",",
"include_only",
",",
"go_marks",
",",
"depth",
",",
"dp",
")"
] | Write hierarchy for a GO Term record. | [
"Write",
"hierarchy",
"for",
"a",
"GO",
"Term",
"record",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L349-L385 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | GODag.write_hier_all | def write_hier_all(self, out=sys.stdout,
len_dash=1, max_depth=None, num_child=None, short_prt=False):
"""Write hierarchy for all GO Terms in obo file."""
# Print: [biological_process, molecular_function, and cellular_component]
for go_id in ['GO:0008150', 'GO:0003674', 'GO:0005575']:
self.write_hier(go_id, out, len_dash, max_depth, num_child, short_prt, None) | python | def write_hier_all(self, out=sys.stdout,
len_dash=1, max_depth=None, num_child=None, short_prt=False):
"""Write hierarchy for all GO Terms in obo file."""
# Print: [biological_process, molecular_function, and cellular_component]
for go_id in ['GO:0008150', 'GO:0003674', 'GO:0005575']:
self.write_hier(go_id, out, len_dash, max_depth, num_child, short_prt, None) | [
"def",
"write_hier_all",
"(",
"self",
",",
"out",
"=",
"sys",
".",
"stdout",
",",
"len_dash",
"=",
"1",
",",
"max_depth",
"=",
"None",
",",
"num_child",
"=",
"None",
",",
"short_prt",
"=",
"False",
")",
":",
"# Print: [biological_process, molecular_function, and cellular_component]",
"for",
"go_id",
"in",
"[",
"'GO:0008150'",
",",
"'GO:0003674'",
",",
"'GO:0005575'",
"]",
":",
"self",
".",
"write_hier",
"(",
"go_id",
",",
"out",
",",
"len_dash",
",",
"max_depth",
",",
"num_child",
",",
"short_prt",
",",
"None",
")"
] | Write hierarchy for all GO Terms in obo file. | [
"Write",
"hierarchy",
"for",
"all",
"GO",
"Terms",
"in",
"obo",
"file",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L492-L497 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | GODag.write_hier | def write_hier(self, GO_id, out=sys.stdout,
len_dash=1, max_depth=None, num_child=None, short_prt=False,
include_only=None, go_marks=None):
"""Write hierarchy for a GO Term."""
gos_printed = set()
self[GO_id].write_hier_rec(gos_printed, out, len_dash, max_depth, num_child,
short_prt, include_only, go_marks) | python | def write_hier(self, GO_id, out=sys.stdout,
len_dash=1, max_depth=None, num_child=None, short_prt=False,
include_only=None, go_marks=None):
"""Write hierarchy for a GO Term."""
gos_printed = set()
self[GO_id].write_hier_rec(gos_printed, out, len_dash, max_depth, num_child,
short_prt, include_only, go_marks) | [
"def",
"write_hier",
"(",
"self",
",",
"GO_id",
",",
"out",
"=",
"sys",
".",
"stdout",
",",
"len_dash",
"=",
"1",
",",
"max_depth",
"=",
"None",
",",
"num_child",
"=",
"None",
",",
"short_prt",
"=",
"False",
",",
"include_only",
"=",
"None",
",",
"go_marks",
"=",
"None",
")",
":",
"gos_printed",
"=",
"set",
"(",
")",
"self",
"[",
"GO_id",
"]",
".",
"write_hier_rec",
"(",
"gos_printed",
",",
"out",
",",
"len_dash",
",",
"max_depth",
",",
"num_child",
",",
"short_prt",
",",
"include_only",
",",
"go_marks",
")"
] | Write hierarchy for a GO Term. | [
"Write",
"hierarchy",
"for",
"a",
"GO",
"Term",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L499-L505 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | GODag.paths_to_top | def paths_to_top(self, term):
""" Returns all possible paths to the root node
Each path includes the term given. The order of the path is
top -> bottom, i.e. it starts with the root and ends with the
given term (inclusively).
Parameters:
-----------
- term:
the id of the GO term, where the paths begin (i.e. the
accession 'GO:0003682')
Returns:
--------
- a list of lists of GO Terms
"""
# error handling consistent with original authors
if term not in self:
print("Term %s not found!" % term, file=sys.stderr)
return
def _paths_to_top_recursive(rec):
if rec.level == 0:
return [[rec]]
paths = []
for parent in rec.parents:
top_paths = _paths_to_top_recursive(parent)
for top_path in top_paths:
top_path.append(rec)
paths.append(top_path)
return paths
go_term = self[term]
return _paths_to_top_recursive(go_term) | python | def paths_to_top(self, term):
""" Returns all possible paths to the root node
Each path includes the term given. The order of the path is
top -> bottom, i.e. it starts with the root and ends with the
given term (inclusively).
Parameters:
-----------
- term:
the id of the GO term, where the paths begin (i.e. the
accession 'GO:0003682')
Returns:
--------
- a list of lists of GO Terms
"""
# error handling consistent with original authors
if term not in self:
print("Term %s not found!" % term, file=sys.stderr)
return
def _paths_to_top_recursive(rec):
if rec.level == 0:
return [[rec]]
paths = []
for parent in rec.parents:
top_paths = _paths_to_top_recursive(parent)
for top_path in top_paths:
top_path.append(rec)
paths.append(top_path)
return paths
go_term = self[term]
return _paths_to_top_recursive(go_term) | [
"def",
"paths_to_top",
"(",
"self",
",",
"term",
")",
":",
"# error handling consistent with original authors",
"if",
"term",
"not",
"in",
"self",
":",
"print",
"(",
"\"Term %s not found!\"",
"%",
"term",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"return",
"def",
"_paths_to_top_recursive",
"(",
"rec",
")",
":",
"if",
"rec",
".",
"level",
"==",
"0",
":",
"return",
"[",
"[",
"rec",
"]",
"]",
"paths",
"=",
"[",
"]",
"for",
"parent",
"in",
"rec",
".",
"parents",
":",
"top_paths",
"=",
"_paths_to_top_recursive",
"(",
"parent",
")",
"for",
"top_path",
"in",
"top_paths",
":",
"top_path",
".",
"append",
"(",
"rec",
")",
"paths",
".",
"append",
"(",
"top_path",
")",
"return",
"paths",
"go_term",
"=",
"self",
"[",
"term",
"]",
"return",
"_paths_to_top_recursive",
"(",
"go_term",
")"
] | Returns all possible paths to the root node
Each path includes the term given. The order of the path is
top -> bottom, i.e. it starts with the root and ends with the
given term (inclusively).
Parameters:
-----------
- term:
the id of the GO term, where the paths begin (i.e. the
accession 'GO:0003682')
Returns:
--------
- a list of lists of GO Terms | [
"Returns",
"all",
"possible",
"paths",
"to",
"the",
"root",
"node"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L522-L556 | train |
ga4gh/ga4gh-server | ga4gh/server/datamodel/obo_parser.py | GODag.make_graph_pydot | def make_graph_pydot(self, recs, nodecolor,
edgecolor, dpi,
draw_parents=True, draw_children=True):
"""draw AMIGO style network, lineage containing one query record."""
import pydot
G = pydot.Dot(graph_type='digraph', dpi="{}".format(dpi)) # Directed Graph
edgeset = set()
usr_ids = [rec.id for rec in recs]
for rec in recs:
if draw_parents:
edgeset.update(rec.get_all_parent_edges())
if draw_children:
edgeset.update(rec.get_all_child_edges())
lw = self._label_wrap
rec_id_set = set([rec_id for endpts in edgeset for rec_id in endpts])
nodes = {str(ID):pydot.Node(
lw(ID).replace("GO:",""), # Node name
shape="box",
style="rounded, filled",
# Highlight query terms in plum:
fillcolor="beige" if ID not in usr_ids else "plum",
color=nodecolor)
for ID in rec_id_set}
# add nodes explicitly via add_node
for rec_id, node in nodes.items():
G.add_node(node)
for src, target in edgeset:
# default layout in graphviz is top->bottom, so we invert
# the direction and plot using dir="back"
G.add_edge(pydot.Edge(nodes[target], nodes[src],
shape="normal",
color=edgecolor,
label="is_a",
dir="back"))
return G | python | def make_graph_pydot(self, recs, nodecolor,
edgecolor, dpi,
draw_parents=True, draw_children=True):
"""draw AMIGO style network, lineage containing one query record."""
import pydot
G = pydot.Dot(graph_type='digraph', dpi="{}".format(dpi)) # Directed Graph
edgeset = set()
usr_ids = [rec.id for rec in recs]
for rec in recs:
if draw_parents:
edgeset.update(rec.get_all_parent_edges())
if draw_children:
edgeset.update(rec.get_all_child_edges())
lw = self._label_wrap
rec_id_set = set([rec_id for endpts in edgeset for rec_id in endpts])
nodes = {str(ID):pydot.Node(
lw(ID).replace("GO:",""), # Node name
shape="box",
style="rounded, filled",
# Highlight query terms in plum:
fillcolor="beige" if ID not in usr_ids else "plum",
color=nodecolor)
for ID in rec_id_set}
# add nodes explicitly via add_node
for rec_id, node in nodes.items():
G.add_node(node)
for src, target in edgeset:
# default layout in graphviz is top->bottom, so we invert
# the direction and plot using dir="back"
G.add_edge(pydot.Edge(nodes[target], nodes[src],
shape="normal",
color=edgecolor,
label="is_a",
dir="back"))
return G | [
"def",
"make_graph_pydot",
"(",
"self",
",",
"recs",
",",
"nodecolor",
",",
"edgecolor",
",",
"dpi",
",",
"draw_parents",
"=",
"True",
",",
"draw_children",
"=",
"True",
")",
":",
"import",
"pydot",
"G",
"=",
"pydot",
".",
"Dot",
"(",
"graph_type",
"=",
"'digraph'",
",",
"dpi",
"=",
"\"{}\"",
".",
"format",
"(",
"dpi",
")",
")",
"# Directed Graph",
"edgeset",
"=",
"set",
"(",
")",
"usr_ids",
"=",
"[",
"rec",
".",
"id",
"for",
"rec",
"in",
"recs",
"]",
"for",
"rec",
"in",
"recs",
":",
"if",
"draw_parents",
":",
"edgeset",
".",
"update",
"(",
"rec",
".",
"get_all_parent_edges",
"(",
")",
")",
"if",
"draw_children",
":",
"edgeset",
".",
"update",
"(",
"rec",
".",
"get_all_child_edges",
"(",
")",
")",
"lw",
"=",
"self",
".",
"_label_wrap",
"rec_id_set",
"=",
"set",
"(",
"[",
"rec_id",
"for",
"endpts",
"in",
"edgeset",
"for",
"rec_id",
"in",
"endpts",
"]",
")",
"nodes",
"=",
"{",
"str",
"(",
"ID",
")",
":",
"pydot",
".",
"Node",
"(",
"lw",
"(",
"ID",
")",
".",
"replace",
"(",
"\"GO:\"",
",",
"\"\"",
")",
",",
"# Node name",
"shape",
"=",
"\"box\"",
",",
"style",
"=",
"\"rounded, filled\"",
",",
"# Highlight query terms in plum:",
"fillcolor",
"=",
"\"beige\"",
"if",
"ID",
"not",
"in",
"usr_ids",
"else",
"\"plum\"",
",",
"color",
"=",
"nodecolor",
")",
"for",
"ID",
"in",
"rec_id_set",
"}",
"# add nodes explicitly via add_node",
"for",
"rec_id",
",",
"node",
"in",
"nodes",
".",
"items",
"(",
")",
":",
"G",
".",
"add_node",
"(",
"node",
")",
"for",
"src",
",",
"target",
"in",
"edgeset",
":",
"# default layout in graphviz is top->bottom, so we invert",
"# the direction and plot using dir=\"back\"",
"G",
".",
"add_edge",
"(",
"pydot",
".",
"Edge",
"(",
"nodes",
"[",
"target",
"]",
",",
"nodes",
"[",
"src",
"]",
",",
"shape",
"=",
"\"normal\"",
",",
"color",
"=",
"edgecolor",
",",
"label",
"=",
"\"is_a\"",
",",
"dir",
"=",
"\"back\"",
")",
")",
"return",
"G"
] | draw AMIGO style network, lineage containing one query record. | [
"draw",
"AMIGO",
"style",
"network",
"lineage",
"containing",
"one",
"query",
"record",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L563-L601 | train |
ga4gh/ga4gh-server | ga4gh/server/sqlite_backend.py | sqliteRowsToDicts | def sqliteRowsToDicts(sqliteRows):
"""
Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names.
"""
return map(lambda r: dict(zip(r.keys(), r)), sqliteRows) | python | def sqliteRowsToDicts(sqliteRows):
"""
Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names.
"""
return map(lambda r: dict(zip(r.keys(), r)), sqliteRows) | [
"def",
"sqliteRowsToDicts",
"(",
"sqliteRows",
")",
":",
"return",
"map",
"(",
"lambda",
"r",
":",
"dict",
"(",
"zip",
"(",
"r",
".",
"keys",
"(",
")",
",",
"r",
")",
")",
",",
"sqliteRows",
")"
] | Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names. | [
"Unpacks",
"sqlite",
"rows",
"as",
"returned",
"by",
"fetchall",
"into",
"an",
"array",
"of",
"simple",
"dicts",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/sqlite_backend.py#L13-L21 | train |
ga4gh/ga4gh-server | ga4gh/server/sqlite_backend.py | limitsSql | def limitsSql(startIndex=0, maxResults=0):
"""
Construct a SQL LIMIT clause
"""
if startIndex and maxResults:
return " LIMIT {}, {}".format(startIndex, maxResults)
elif startIndex:
raise Exception("startIndex was provided, but maxResults was not")
elif maxResults:
return " LIMIT {}".format(maxResults)
else:
return "" | python | def limitsSql(startIndex=0, maxResults=0):
"""
Construct a SQL LIMIT clause
"""
if startIndex and maxResults:
return " LIMIT {}, {}".format(startIndex, maxResults)
elif startIndex:
raise Exception("startIndex was provided, but maxResults was not")
elif maxResults:
return " LIMIT {}".format(maxResults)
else:
return "" | [
"def",
"limitsSql",
"(",
"startIndex",
"=",
"0",
",",
"maxResults",
"=",
"0",
")",
":",
"if",
"startIndex",
"and",
"maxResults",
":",
"return",
"\" LIMIT {}, {}\"",
".",
"format",
"(",
"startIndex",
",",
"maxResults",
")",
"elif",
"startIndex",
":",
"raise",
"Exception",
"(",
"\"startIndex was provided, but maxResults was not\"",
")",
"elif",
"maxResults",
":",
"return",
"\" LIMIT {}\"",
".",
"format",
"(",
"maxResults",
")",
"else",
":",
"return",
"\"\""
] | Construct a SQL LIMIT clause | [
"Construct",
"a",
"SQL",
"LIMIT",
"clause"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/sqlite_backend.py#L35-L46 | train |
ga4gh/ga4gh-server | ga4gh/server/sqlite_backend.py | iterativeFetch | def iterativeFetch(query, batchSize=default_batch_size):
"""
Returns rows of a sql fetch query on demand
"""
while True:
rows = query.fetchmany(batchSize)
if not rows:
break
rowDicts = sqliteRowsToDicts(rows)
for rowDict in rowDicts:
yield rowDict | python | def iterativeFetch(query, batchSize=default_batch_size):
"""
Returns rows of a sql fetch query on demand
"""
while True:
rows = query.fetchmany(batchSize)
if not rows:
break
rowDicts = sqliteRowsToDicts(rows)
for rowDict in rowDicts:
yield rowDict | [
"def",
"iterativeFetch",
"(",
"query",
",",
"batchSize",
"=",
"default_batch_size",
")",
":",
"while",
"True",
":",
"rows",
"=",
"query",
".",
"fetchmany",
"(",
"batchSize",
")",
"if",
"not",
"rows",
":",
"break",
"rowDicts",
"=",
"sqliteRowsToDicts",
"(",
"rows",
")",
"for",
"rowDict",
"in",
"rowDicts",
":",
"yield",
"rowDict"
] | Returns rows of a sql fetch query on demand | [
"Returns",
"rows",
"of",
"a",
"sql",
"fetch",
"query",
"on",
"demand"
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/sqlite_backend.py#L52-L62 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | _parsePageToken | def _parsePageToken(pageToken, numValues):
"""
Parses the specified pageToken and returns a list of the specified
number of values. Page tokens are assumed to consist of a fixed
number of integers seperated by colons. If the page token does
not conform to this specification, raise a InvalidPageToken
exception.
"""
tokens = pageToken.split(":")
if len(tokens) != numValues:
msg = "Invalid number of values in page token"
raise exceptions.BadPageTokenException(msg)
try:
values = map(int, tokens)
except ValueError:
msg = "Malformed integers in page token"
raise exceptions.BadPageTokenException(msg)
return values | python | def _parsePageToken(pageToken, numValues):
"""
Parses the specified pageToken and returns a list of the specified
number of values. Page tokens are assumed to consist of a fixed
number of integers seperated by colons. If the page token does
not conform to this specification, raise a InvalidPageToken
exception.
"""
tokens = pageToken.split(":")
if len(tokens) != numValues:
msg = "Invalid number of values in page token"
raise exceptions.BadPageTokenException(msg)
try:
values = map(int, tokens)
except ValueError:
msg = "Malformed integers in page token"
raise exceptions.BadPageTokenException(msg)
return values | [
"def",
"_parsePageToken",
"(",
"pageToken",
",",
"numValues",
")",
":",
"tokens",
"=",
"pageToken",
".",
"split",
"(",
"\":\"",
")",
"if",
"len",
"(",
"tokens",
")",
"!=",
"numValues",
":",
"msg",
"=",
"\"Invalid number of values in page token\"",
"raise",
"exceptions",
".",
"BadPageTokenException",
"(",
"msg",
")",
"try",
":",
"values",
"=",
"map",
"(",
"int",
",",
"tokens",
")",
"except",
"ValueError",
":",
"msg",
"=",
"\"Malformed integers in page token\"",
"raise",
"exceptions",
".",
"BadPageTokenException",
"(",
"msg",
")",
"return",
"values"
] | Parses the specified pageToken and returns a list of the specified
number of values. Page tokens are assumed to consist of a fixed
number of integers seperated by colons. If the page token does
not conform to this specification, raise a InvalidPageToken
exception. | [
"Parses",
"the",
"specified",
"pageToken",
"and",
"returns",
"a",
"list",
"of",
"the",
"specified",
"number",
"of",
"values",
".",
"Page",
"tokens",
"are",
"assumed",
"to",
"consist",
"of",
"a",
"fixed",
"number",
"of",
"integers",
"seperated",
"by",
"colons",
".",
"If",
"the",
"page",
"token",
"does",
"not",
"conform",
"to",
"this",
"specification",
"raise",
"a",
"InvalidPageToken",
"exception",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L13-L30 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | _parseIntegerArgument | def _parseIntegerArgument(args, key, defaultValue):
"""
Attempts to parse the specified key in the specified argument
dictionary into an integer. If the argument cannot be parsed,
raises a BadRequestIntegerException. If the key is not present,
return the specified default value.
"""
ret = defaultValue
try:
if key in args:
try:
ret = int(args[key])
except ValueError:
raise exceptions.BadRequestIntegerException(key, args[key])
except TypeError:
raise Exception((key, args))
return ret | python | def _parseIntegerArgument(args, key, defaultValue):
"""
Attempts to parse the specified key in the specified argument
dictionary into an integer. If the argument cannot be parsed,
raises a BadRequestIntegerException. If the key is not present,
return the specified default value.
"""
ret = defaultValue
try:
if key in args:
try:
ret = int(args[key])
except ValueError:
raise exceptions.BadRequestIntegerException(key, args[key])
except TypeError:
raise Exception((key, args))
return ret | [
"def",
"_parseIntegerArgument",
"(",
"args",
",",
"key",
",",
"defaultValue",
")",
":",
"ret",
"=",
"defaultValue",
"try",
":",
"if",
"key",
"in",
"args",
":",
"try",
":",
"ret",
"=",
"int",
"(",
"args",
"[",
"key",
"]",
")",
"except",
"ValueError",
":",
"raise",
"exceptions",
".",
"BadRequestIntegerException",
"(",
"key",
",",
"args",
"[",
"key",
"]",
")",
"except",
"TypeError",
":",
"raise",
"Exception",
"(",
"(",
"key",
",",
"args",
")",
")",
"return",
"ret"
] | Attempts to parse the specified key in the specified argument
dictionary into an integer. If the argument cannot be parsed,
raises a BadRequestIntegerException. If the key is not present,
return the specified default value. | [
"Attempts",
"to",
"parse",
"the",
"specified",
"key",
"in",
"the",
"specified",
"argument",
"dictionary",
"into",
"an",
"integer",
".",
"If",
"the",
"argument",
"cannot",
"be",
"parsed",
"raises",
"a",
"BadRequestIntegerException",
".",
"If",
"the",
"key",
"is",
"not",
"present",
"return",
"the",
"specified",
"default",
"value",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L33-L49 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | IntervalIterator._initialiseIteration | def _initialiseIteration(self):
"""
Starts a new iteration.
"""
self._searchIterator = self._search(
self._request.start,
self._request.end if self._request.end != 0 else None)
self._currentObject = next(self._searchIterator, None)
if self._currentObject is not None:
self._nextObject = next(self._searchIterator, None)
self._searchAnchor = self._request.start
self._distanceFromAnchor = 0
firstObjectStart = self._getStart(self._currentObject)
if firstObjectStart > self._request.start:
self._searchAnchor = firstObjectStart | python | def _initialiseIteration(self):
"""
Starts a new iteration.
"""
self._searchIterator = self._search(
self._request.start,
self._request.end if self._request.end != 0 else None)
self._currentObject = next(self._searchIterator, None)
if self._currentObject is not None:
self._nextObject = next(self._searchIterator, None)
self._searchAnchor = self._request.start
self._distanceFromAnchor = 0
firstObjectStart = self._getStart(self._currentObject)
if firstObjectStart > self._request.start:
self._searchAnchor = firstObjectStart | [
"def",
"_initialiseIteration",
"(",
"self",
")",
":",
"self",
".",
"_searchIterator",
"=",
"self",
".",
"_search",
"(",
"self",
".",
"_request",
".",
"start",
",",
"self",
".",
"_request",
".",
"end",
"if",
"self",
".",
"_request",
".",
"end",
"!=",
"0",
"else",
"None",
")",
"self",
".",
"_currentObject",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
",",
"None",
")",
"if",
"self",
".",
"_currentObject",
"is",
"not",
"None",
":",
"self",
".",
"_nextObject",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
",",
"None",
")",
"self",
".",
"_searchAnchor",
"=",
"self",
".",
"_request",
".",
"start",
"self",
".",
"_distanceFromAnchor",
"=",
"0",
"firstObjectStart",
"=",
"self",
".",
"_getStart",
"(",
"self",
".",
"_currentObject",
")",
"if",
"firstObjectStart",
">",
"self",
".",
"_request",
".",
"start",
":",
"self",
".",
"_searchAnchor",
"=",
"firstObjectStart"
] | Starts a new iteration. | [
"Starts",
"a",
"new",
"iteration",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L83-L97 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | IntervalIterator._pickUpIteration | def _pickUpIteration(self, searchAnchor, objectsToSkip):
"""
Picks up iteration from a previously provided page token. There are two
different phases here:
1) We are iterating over the initial set of intervals in which start
is < the search start coorindate.
2) We are iterating over the remaining intervals in which start >= to
the search start coordinate.
"""
self._searchAnchor = searchAnchor
self._distanceFromAnchor = objectsToSkip
self._searchIterator = self._search(
searchAnchor,
self._request.end if self._request.end != 0 else None)
obj = next(self._searchIterator)
if searchAnchor == self._request.start:
# This is the initial set of intervals, we just skip forward
# objectsToSkip positions
for _ in range(objectsToSkip):
obj = next(self._searchIterator)
else:
# Now, we are past this initial set of intervals.
# First, we need to skip forward over the intervals where
# start < searchAnchor, as we've seen these already.
while self._getStart(obj) < searchAnchor:
obj = next(self._searchIterator)
# Now, we skip over objectsToSkip objects such that
# start == searchAnchor
for _ in range(objectsToSkip):
if self._getStart(obj) != searchAnchor:
raise exceptions.BadPageTokenException
obj = next(self._searchIterator)
self._currentObject = obj
self._nextObject = next(self._searchIterator, None) | python | def _pickUpIteration(self, searchAnchor, objectsToSkip):
"""
Picks up iteration from a previously provided page token. There are two
different phases here:
1) We are iterating over the initial set of intervals in which start
is < the search start coorindate.
2) We are iterating over the remaining intervals in which start >= to
the search start coordinate.
"""
self._searchAnchor = searchAnchor
self._distanceFromAnchor = objectsToSkip
self._searchIterator = self._search(
searchAnchor,
self._request.end if self._request.end != 0 else None)
obj = next(self._searchIterator)
if searchAnchor == self._request.start:
# This is the initial set of intervals, we just skip forward
# objectsToSkip positions
for _ in range(objectsToSkip):
obj = next(self._searchIterator)
else:
# Now, we are past this initial set of intervals.
# First, we need to skip forward over the intervals where
# start < searchAnchor, as we've seen these already.
while self._getStart(obj) < searchAnchor:
obj = next(self._searchIterator)
# Now, we skip over objectsToSkip objects such that
# start == searchAnchor
for _ in range(objectsToSkip):
if self._getStart(obj) != searchAnchor:
raise exceptions.BadPageTokenException
obj = next(self._searchIterator)
self._currentObject = obj
self._nextObject = next(self._searchIterator, None) | [
"def",
"_pickUpIteration",
"(",
"self",
",",
"searchAnchor",
",",
"objectsToSkip",
")",
":",
"self",
".",
"_searchAnchor",
"=",
"searchAnchor",
"self",
".",
"_distanceFromAnchor",
"=",
"objectsToSkip",
"self",
".",
"_searchIterator",
"=",
"self",
".",
"_search",
"(",
"searchAnchor",
",",
"self",
".",
"_request",
".",
"end",
"if",
"self",
".",
"_request",
".",
"end",
"!=",
"0",
"else",
"None",
")",
"obj",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
")",
"if",
"searchAnchor",
"==",
"self",
".",
"_request",
".",
"start",
":",
"# This is the initial set of intervals, we just skip forward",
"# objectsToSkip positions",
"for",
"_",
"in",
"range",
"(",
"objectsToSkip",
")",
":",
"obj",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
")",
"else",
":",
"# Now, we are past this initial set of intervals.",
"# First, we need to skip forward over the intervals where",
"# start < searchAnchor, as we've seen these already.",
"while",
"self",
".",
"_getStart",
"(",
"obj",
")",
"<",
"searchAnchor",
":",
"obj",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
")",
"# Now, we skip over objectsToSkip objects such that",
"# start == searchAnchor",
"for",
"_",
"in",
"range",
"(",
"objectsToSkip",
")",
":",
"if",
"self",
".",
"_getStart",
"(",
"obj",
")",
"!=",
"searchAnchor",
":",
"raise",
"exceptions",
".",
"BadPageTokenException",
"obj",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
")",
"self",
".",
"_currentObject",
"=",
"obj",
"self",
".",
"_nextObject",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
",",
"None",
")"
] | Picks up iteration from a previously provided page token. There are two
different phases here:
1) We are iterating over the initial set of intervals in which start
is < the search start coorindate.
2) We are iterating over the remaining intervals in which start >= to
the search start coordinate. | [
"Picks",
"up",
"iteration",
"from",
"a",
"previously",
"provided",
"page",
"token",
".",
"There",
"are",
"two",
"different",
"phases",
"here",
":",
"1",
")",
"We",
"are",
"iterating",
"over",
"the",
"initial",
"set",
"of",
"intervals",
"in",
"which",
"start",
"is",
"<",
"the",
"search",
"start",
"coorindate",
".",
"2",
")",
"We",
"are",
"iterating",
"over",
"the",
"remaining",
"intervals",
"in",
"which",
"start",
">",
"=",
"to",
"the",
"search",
"start",
"coordinate",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L99-L132 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | IntervalIterator.next | def next(self):
"""
Returns the next (object, nextPageToken) pair.
"""
if self._currentObject is None:
raise StopIteration()
nextPageToken = None
if self._nextObject is not None:
start = self._getStart(self._nextObject)
# If start > the search anchor, move the search anchor. Otherwise,
# increment the distance from the anchor.
if start > self._searchAnchor:
self._searchAnchor = start
self._distanceFromAnchor = 0
else:
self._distanceFromAnchor += 1
nextPageToken = "{}:{}".format(
self._searchAnchor, self._distanceFromAnchor)
ret = self._extractProtocolObject(self._currentObject), nextPageToken
self._currentObject = self._nextObject
self._nextObject = next(self._searchIterator, None)
return ret | python | def next(self):
"""
Returns the next (object, nextPageToken) pair.
"""
if self._currentObject is None:
raise StopIteration()
nextPageToken = None
if self._nextObject is not None:
start = self._getStart(self._nextObject)
# If start > the search anchor, move the search anchor. Otherwise,
# increment the distance from the anchor.
if start > self._searchAnchor:
self._searchAnchor = start
self._distanceFromAnchor = 0
else:
self._distanceFromAnchor += 1
nextPageToken = "{}:{}".format(
self._searchAnchor, self._distanceFromAnchor)
ret = self._extractProtocolObject(self._currentObject), nextPageToken
self._currentObject = self._nextObject
self._nextObject = next(self._searchIterator, None)
return ret | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"_currentObject",
"is",
"None",
":",
"raise",
"StopIteration",
"(",
")",
"nextPageToken",
"=",
"None",
"if",
"self",
".",
"_nextObject",
"is",
"not",
"None",
":",
"start",
"=",
"self",
".",
"_getStart",
"(",
"self",
".",
"_nextObject",
")",
"# If start > the search anchor, move the search anchor. Otherwise,",
"# increment the distance from the anchor.",
"if",
"start",
">",
"self",
".",
"_searchAnchor",
":",
"self",
".",
"_searchAnchor",
"=",
"start",
"self",
".",
"_distanceFromAnchor",
"=",
"0",
"else",
":",
"self",
".",
"_distanceFromAnchor",
"+=",
"1",
"nextPageToken",
"=",
"\"{}:{}\"",
".",
"format",
"(",
"self",
".",
"_searchAnchor",
",",
"self",
".",
"_distanceFromAnchor",
")",
"ret",
"=",
"self",
".",
"_extractProtocolObject",
"(",
"self",
".",
"_currentObject",
")",
",",
"nextPageToken",
"self",
".",
"_currentObject",
"=",
"self",
".",
"_nextObject",
"self",
".",
"_nextObject",
"=",
"next",
"(",
"self",
".",
"_searchIterator",
",",
"None",
")",
"return",
"ret"
] | Returns the next (object, nextPageToken) pair. | [
"Returns",
"the",
"next",
"(",
"object",
"nextPageToken",
")",
"pair",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L134-L155 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | VariantAnnotationsIntervalIterator.filterVariantAnnotation | def filterVariantAnnotation(self, vann):
"""
Returns true when an annotation should be included.
"""
# TODO reintroduce feature ID search
ret = False
# An effect filter was requested but the annotation has no transcript
# effects at all, so nothing can possibly match: exclude it.
if len(self._effects) != 0 and not vann.transcript_effects:
return False
# No effect filter was requested: include every annotation.
elif len(self._effects) == 0:
return True
# Include the annotation when at least one of its transcript effects
# matches the requested effect filter.
for teff in vann.transcript_effects:
if self.filterEffect(teff):
ret = True
return ret | python | def filterVariantAnnotation(self, vann):
"""
Returns true when an annotation should be included.
"""
# TODO reintroduce feature ID search
ret = False
if len(self._effects) != 0 and not vann.transcript_effects:
return False
elif len(self._effects) == 0:
return True
for teff in vann.transcript_effects:
if self.filterEffect(teff):
ret = True
return ret | [
"def",
"filterVariantAnnotation",
"(",
"self",
",",
"vann",
")",
":",
"# TODO reintroduce feature ID search",
"ret",
"=",
"False",
"if",
"len",
"(",
"self",
".",
"_effects",
")",
"!=",
"0",
"and",
"not",
"vann",
".",
"transcript_effects",
":",
"return",
"False",
"elif",
"len",
"(",
"self",
".",
"_effects",
")",
"==",
"0",
":",
"return",
"True",
"for",
"teff",
"in",
"vann",
".",
"transcript_effects",
":",
"if",
"self",
".",
"filterEffect",
"(",
"teff",
")",
":",
"ret",
"=",
"True",
"return",
"ret"
] | Returns true when an annotation should be included. | [
"Returns",
"true",
"when",
"an",
"annotation",
"should",
"be",
"included",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L246-L259 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | VariantAnnotationsIntervalIterator.filterEffect | def filterEffect(self, teff):
"""
Returns true when any of the transcript effects
are present in the request.
"""
ret = False
# OR together the match results; note that every effect is still
# visited (no short-circuit on the first match).
for effect in teff.effects:
ret = self._matchAnyEffects(effect) or ret
return ret | python | def filterEffect(self, teff):
"""
Returns true when any of the transcript effects
are present in the request.
"""
ret = False
for effect in teff.effects:
ret = self._matchAnyEffects(effect) or ret
return ret | [
"def",
"filterEffect",
"(",
"self",
",",
"teff",
")",
":",
"ret",
"=",
"False",
"for",
"effect",
"in",
"teff",
".",
"effects",
":",
"ret",
"=",
"self",
".",
"_matchAnyEffects",
"(",
"effect",
")",
"or",
"ret",
"return",
"ret"
] | Returns true when any of the transcript effects
are present in the request. | [
"Returns",
"true",
"when",
"any",
"of",
"the",
"transcript",
"effects",
"are",
"present",
"in",
"the",
"request",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L261-L269 | train |
ga4gh/ga4gh-server | ga4gh/server/paging.py | VariantAnnotationsIntervalIterator._checkIdEquality | def _checkIdEquality(self, requestedEffect, effect):
"""
Tests whether a requested effect and an effect
present in an annotation are equal.
"""
# Only compare term IDs when the requested effect actually carries an
# ID; otherwise this requested effect cannot match by ID.
return self._idPresent(requestedEffect) and (
effect.term_id == requestedEffect.term_id) | python | def _checkIdEquality(self, requestedEffect, effect):
"""
Tests whether a requested effect and an effect
present in an annotation are equal.
"""
return self._idPresent(requestedEffect) and (
effect.term_id == requestedEffect.term_id) | [
"def",
"_checkIdEquality",
"(",
"self",
",",
"requestedEffect",
",",
"effect",
")",
":",
"return",
"self",
".",
"_idPresent",
"(",
"requestedEffect",
")",
"and",
"(",
"effect",
".",
"term_id",
"==",
"requestedEffect",
".",
"term_id",
")"
] | Tests whether a requested effect and an effect
present in an annotation are equal. | [
"Tests",
"whether",
"a",
"requested",
"effect",
"and",
"an",
"effect",
"present",
"in",
"an",
"annotation",
"are",
"equal",
"."
] | 1aa18922ef136db8604f6f098cb1732cba6f2a76 | https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L271-L277 | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.