def enter(self, path):
"""
Enters the given node. Creates it if it does not exist.
Returns the node.
"""
self.current.append(self.add(path))
return self.current[-1] |
Given a converter (as returned by compile()), this function reads
the given input file and converts it to the requested output format.
Supported output formats are 'xml', 'yaml', 'json', or 'none'.
:type converter: compiler.Context
:param converter: The compiled converter.
:type input_file: str
:param input_file: Name of a file to convert.
:type format: str
:param format: The output format.
:type encoding: str
:param encoding: Character encoding of the input file.
:rtype: str
:return: The resulting output.
def generate(converter, input_file, format='xml', encoding='utf8'):
"""
Given a converter (as returned by compile()), this function reads
the given input file and converts it to the requested output format.
Supported output formats are 'xml', 'yaml', 'json', or 'none'.
:type converter: compiler.Context
:param converter: The compiled converter.
:type input_file: str
:param input_file: Name of a file to convert.
:type format: str
:param format: The output format.
:type encoding: str
:param encoding: Character encoding of the input file.
:rtype: str
:return: The resulting output.
"""
with codecs.open(input_file, encoding=encoding) as thefile:
return generate_string(converter, thefile.read(), format=format)
def generate_to_file(converter,
input_file,
output_file,
format='xml',
in_encoding='utf8',
out_encoding='utf8'):
"""
Like generate(), but writes the output to the given output file
instead.
:type converter: compiler.Context
:param converter: The compiled converter.
:type input_file: str
:param input_file: Name of a file to convert.
:type output_file: str
:param output_file: The output filename.
:type format: str
:param format: The output format.
:type in_encoding: str
:param in_encoding: Character encoding of the input file.
:type out_encoding: str
:param out_encoding: Character encoding of the output file.
:rtype: str
:return: The resulting output.
"""
with codecs.open(output_file, 'w', encoding=out_encoding) as thefile:
result = generate(converter, input_file, format=format, encoding=in_encoding)
thefile.write(result)
def generate_string(converter, input, format='xml'):
"""
Like generate(), but reads the input from a string instead of
from a file.
:type converter: compiler.Context
:param converter: The compiled converter.
:type input: str
:param input: The string to convert.
:type format: str
:param format: The output format.
:rtype: str
:return: The resulting output.
"""
serializer = generator.new(format)
if serializer is None:
raise TypeError('invalid output format ' + repr(format))
builder = Builder()
converter.parse_string(input, builder)
return builder.serialize(serializer)
def generate_string_to_file(converter,
input,
output_file,
format='xml',
out_encoding='utf8'):
"""
Like generate(), but reads the input from a string instead of
from a file, and writes the output to the given output file.
:type converter: compiler.Context
:param converter: The compiled converter.
:type input: str
:param input: The string to convert.
:type output_file: str
:param output_file: The output filename.
:type format: str
:param format: The output format.
:type out_encoding: str
:param out_encoding: Character encoding of the output file.
:rtype: str
:return: The resulting output.
"""
with codecs.open(output_file, 'w', encoding=out_encoding) as thefile:
result = generate_string(converter, input, format=format)
thefile.write(result)
def is_now(s, dt=None):
'''
A very simple cron-like parser to determine whether a (cron-like)
string is valid for the given date and time.
@input:
s = cron-like string (minute, hour, day of month, month, day of week)
dt = datetime to use as reference time, defaults to now
@output: boolean of result
'''
if dt is None:
dt = datetime.now()
minute, hour, dom, month, dow = s.split(' ')
weekday = dt.isoweekday()
return _parse_arg(minute, dt.minute) \
and _parse_arg(hour, dt.hour) \
and _parse_arg(dom, dt.day) \
and _parse_arg(month, dt.month) \
and _parse_arg(dow, 0 if weekday == 7 else weekday, True)
def has_been(s, since, dt=None):
'''
A parser to check whether a (cron-like) string has been true during a certain time period.
Useful for applications which cannot check every minute or need to catch up during a restart.
@input:
s = cron-like string (minute, hour, day of month, month, day of week)
since = datetime to use as reference time for start of period
dt = datetime to use as reference time for end of period, defaults to now
@output: boolean of result
'''
if dt is None:
dt = datetime.now(tz=since.tzinfo)
if dt < since:
raise ValueError("The since datetime must be before the current datetime.")
while since <= dt:
if is_now(s, since):
return True
since += timedelta(minutes=1)
return False
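# A hedged usage sketch for is_now()/has_been(), assuming _parse_arg() (not
# shown here) accepts plain numeric fields and '*' wildcards as standard
# cron does:
#
#     from datetime import datetime, timedelta
#     dt = datetime(2024, 5, 3, 14, 30)                    # a Friday, 14:30
#     is_now('30 14 * * *', dt)                            # -> True
#     has_been('0 9 * * *', dt - timedelta(hours=6), dt)   # -> True: 09:00 falls in the window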
def auprc(y_true, y_pred):
"""Area under the precision-recall curve
"""
y_true, y_pred = _mask_value_nan(y_true, y_pred)
return skm.average_precision_score(y_true, y_pred)
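# Hedged sanity check for auprc(): with no NaNs present, _mask_value_nan
# should be a no-op, so the result equals sklearn's average_precision_score.
# The example values come from the sklearn documentation (result ~0.8333):
import numpy as np
import sklearn.metrics as skm
assert abs(skm.average_precision_score(
    np.array([0, 0, 1, 1]), np.array([0.1, 0.4, 0.35, 0.8])) - 0.8333) < 1e-3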
def best_trial_tid(self, rank=0):
"""Get tid of the best trial
rank=0 means the best model
rank=1 means second best
...
"""
candidates = [t for t in self.trials
if t['result']['status'] == STATUS_OK]
if len(candidates) == 0:
return None
losses = [float(t['result']['loss']) for t in candidates]
assert not np.any(np.isnan(losses))
lid = np.where(np.argsort(losses).argsort() == rank)[0][0]
return candidates[lid]["tid"]
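# Why the double argsort above works (illustrative): argsort().argsort()
# maps each element to its rank, so comparing the ranks against `rank`
# selects the index of the rank-th smallest loss.
import numpy as np
_losses = np.array([0.31, 0.12, 0.55, 0.27])
_ranks = np.argsort(_losses).argsort()    # -> array([2, 0, 3, 1])
assert np.where(_ranks == 0)[0][0] == 1   # index of the smallest loss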
def count_by_state_unsynced(self, arg):
"""Extends the original object in order to inject checking
for stalled jobs and killing them if they are running for too long
"""
if self.kill_timeout is not None:
self.delete_running(self.kill_timeout)
return super(KMongoTrials, self).count_by_state_unsynced(arg)
def plot_history(self, tid, scores=["loss", "f1", "accuracy"],
figsize=(15, 3)):
"""Plot the loss curves"""
history = self.train_history(tid)
import matplotlib.pyplot as plt
fig = plt.figure(figsize=figsize)
for i, score in enumerate(scores):
plt.subplot(1, len(scores), i + 1)
plt.tight_layout()
plt.plot(history[score], label="train")
plt.plot(history['val_' + score], label="validation")
plt.title(score)
plt.ylabel(score)
plt.xlabel('epoch')
plt.legend(loc='best')
return fig
def load_model(self, tid, custom_objects=None):
"""Load saved keras model of the trial.
If tid = None, get the best model
Not applicable for trials ran in cross validion (i.e. not applicable
for `CompileFN.cv_n_folds is None`
"""
if tid is None:
tid = self.best_trial_tid()
model_path = self.get_trial(tid)["result"]["path"]["model"]
return load_model(model_path, custom_objects=custom_objects)
def n_ok(self):
"""Number of ok trials()
"""
if len(self.trials) == 0:
return 0
else:
return np.sum(np.array(self.statuses()) == "ok")
def get_ok_results(self, verbose=True):
"""Return a list of results with ok status
"""
if len(self.trials) == 0:
return []
not_ok = np.where(np.array(self.statuses()) != "ok")[0]
if len(not_ok) > 0 and verbose:
print("{0}/{1} trials were not ok.".format(len(not_ok), len(self.trials)))
print("Trials: " + str(not_ok))
print("Statuses: " + str(np.array(self.statuses())[not_ok]))
r = [merge_dicts({"tid": t["tid"]}, t["result"].to_dict())
for t in self.trials if t["result"]["status"] == "ok"]
return r
def VerifierMiddleware(verifier):
"""Common wrapper for the authentication modules.
* Parses the request before passing it on to the authentication module.
* Sets 'pyoidc' cookie if authentication succeeds.
* Redirects the user to complete the authentication.
* Allows the user to retry authentication if it fails.
:param verifier: authentication module
"""
@wraps(verifier.verify)
def wrapper(environ, start_response):
data = get_post(environ)
kwargs = dict(urlparse.parse_qsl(data))
kwargs["state"] = json.loads(urllib.unquote(kwargs["state"]))
val, completed = verifier.verify(**kwargs)
if not completed:
return val(environ, start_response)
if val:
set_cookie, cookie_value = verifier.create_cookie(val, "auth")
cookie_value += "; path=/"
url = "{base_url}?{query_string}".format(
base_url="/authorization",
query_string=kwargs["state"]["query"])
response = SeeOther(url, headers=[(set_cookie, cookie_value)])
return response(environ, start_response)
else: # Unsuccessful authentication
url = "{base_url}?{query_string}".format(
base_url="/authorization",
query_string=kwargs["state"]["query"])
response = SeeOther(url)
return response(environ, start_response)
return wrapper
def pyoidcMiddleware(func):
"""Common wrapper for the underlying pyoidc library functions.
Reads GET params and POST data before passing it on the library and
converts the response from oic.utils.http_util to wsgi.
:param func: underlying library function
"""
def wrapper(environ, start_response):
data = get_or_post(environ)
cookies = environ.get("HTTP_COOKIE", "")
resp = func(request=data, cookie=cookies)
return resp(environ, start_response)
return wrapper
def resp2flask(resp):
"""Convert an oic.utils.http_util instance to Flask."""
if isinstance(resp, (Redirect, SeeOther)):
code = int(resp.status.split()[0])
raise cherrypy.HTTPRedirect(resp.message, code)
return resp.message, resp.status, resp.headers
def setup_authentication_methods(authn_config, template_env):
"""Add all authentication methods specified in the configuration."""
routing = {}
ac = AuthnBroker()
for authn_method in authn_config:
cls = make_cls_from_name(authn_method["class"])
instance = cls(template_env=template_env, **authn_method["kwargs"])
ac.add(authn_method["acr"], instance)
routing[instance.url_endpoint] = VerifierMiddleware(instance)
return ac, routing
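# Example of the configuration shape consumed above (hedged: the class path
# and kwargs are hypothetical; only the keys "acr", "class" and "kwargs" are
# read by setup_authentication_methods):
#
#     authn_config = [{
#         "acr": "password",
#         "class": "example.auth.UsernamePasswordVerifier",  # hypothetical
#         "kwargs": {"db_uri": "sqlite:///users.db"},        # hypothetical
#     }]
#     ac, routing = setup_authentication_methods(authn_config, template_env)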
def setup_endpoints(provider):
"""Setup the OpenID Connect Provider endpoints."""
app_routing = {}
endpoints = [
AuthorizationEndpoint(
pyoidcMiddleware(provider.authorization_endpoint)),
TokenEndpoint(
pyoidcMiddleware(provider.token_endpoint)),
UserinfoEndpoint(
pyoidcMiddleware(provider.userinfo_endpoint)),
RegistrationEndpoint(
pyoidcMiddleware(provider.registration_endpoint)),
EndSessionEndpoint(
pyoidcMiddleware(provider.endsession_endpoint))
]
for ep in endpoints:
app_routing["/{}".format(ep.etype)] = ep
return app_routing
def _webfinger(provider, request, **kwargs):
"""Handle webfinger requests."""
params = urlparse.parse_qs(request)
if params["rel"][0] == OIC_ISSUER:
wf = WebFinger()
return Response(wf.response(params["resource"][0], provider.baseurl),
headers=[("Content-Type", "application/jrd+json")])
else:
return BadRequest("Incorrect webfinger.")
def featuresQuery(self, **kwargs):
"""
Converts a dictionary of keyword arguments into a tuple
of SQL select statements and the list of SQL arguments
"""
# TODO: Optimize by refactoring out string concatenation
sql = ""
sql_rows = "SELECT * FROM FEATURE WHERE id > 1 "
sql_args = ()
if 'name' in kwargs and kwargs['name']:
sql += "AND name = ? "
sql_args += (kwargs.get('name'),)
if 'geneSymbol' in kwargs and kwargs['geneSymbol']:
sql += "AND gene_name = ? "
sql_args += (kwargs.get('geneSymbol'),)
if 'start' in kwargs and kwargs['start'] is not None:
sql += "AND end > ? "
sql_args += (kwargs.get('start'),)
if 'end' in kwargs and kwargs['end'] is not None:
sql += "AND start < ? "
sql_args += (kwargs.get('end'),)
if 'referenceName' in kwargs and kwargs['referenceName']:
sql += "AND reference_name = ?"
sql_args += (kwargs.get('referenceName'),)
if 'parentId' in kwargs and kwargs['parentId']:
sql += "AND parent_id = ? "
sql_args += (kwargs['parentId'],)
if kwargs.get('featureTypes') is not None \
and len(kwargs['featureTypes']) > 0:
sql += "AND type IN ("
sql += ", ".join(["?", ] * len(kwargs.get('featureTypes')))
sql += ") "
sql_args += tuple(kwargs.get('featureTypes'))
sql_rows += sql
sql_rows += " ORDER BY reference_name, start, end ASC "
return sql_rows, sql_args
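# featuresQuery() reads only its keyword arguments, so the SQL it builds can
# be inspected with a placeholder in the self slot (an illustrative sketch,
# not part of the class API):
#
#     sql, args = featuresQuery(None, referenceName='chr1', start=1000, end=2000)
#     # sql  == "SELECT * FROM FEATURE WHERE id > 1 AND end > ? AND start < ? "
#     #         "AND reference_name =? ORDER BY reference_name, start, end ASC "
#     # args == (1000, 2000, 'chr1')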
def searchFeaturesInDb(
self, startIndex=0, maxResults=None,
referenceName=None, start=None, end=None,
parentId=None, featureTypes=None,
name=None, geneSymbol=None):
"""
Perform a full features query in database.
:param startIndex: int representing first record to return
:param maxResults: int representing number of records to return
:param referenceName: string representing reference name, ex 'chr1'
:param start: int position on reference to start search
:param end: int position on reference to end search >= start
:param parentId: string restrict search by id of parent node.
:param name: match features by name
:param geneSymbol: match features by gene symbol
:return: an array of dictionaries representing the returned data.
"""
# TODO: Refactor out common bits of this and the above count query.
sql, sql_args = self.featuresQuery(
startIndex=startIndex, maxResults=maxResults,
referenceName=referenceName, start=start, end=end,
parentId=parentId, featureTypes=featureTypes,
name=name, geneSymbol=geneSymbol)
sql += sqlite_backend.limitsSql(startIndex, maxResults)
query = self._dbconn.execute(sql, sql_args)
return sqlite_backend.sqliteRowsToDicts(query.fetchall())
def getFeatureById(self, featureId):
"""
Fetch feature by featureID.
:param featureId: the FeatureID as found in GFF3 records
:return: dictionary representing a feature object,
or None if no match is found.
"""
sql = "SELECT * FROM FEATURE WHERE id = ?"
query = self._dbconn.execute(sql, (featureId,))
ret = query.fetchone()
if ret is None:
return None
return sqlite_backend.sqliteRowToDict(ret)
def toProtocolElement(self):
"""
Returns the representation of this FeatureSet as the corresponding
ProtocolElement.
"""
gaFeatureSet = protocol.FeatureSet()
gaFeatureSet.id = self.getId()
gaFeatureSet.dataset_id = self.getParentContainer().getId()
gaFeatureSet.reference_set_id = pb.string(self._referenceSet.getId())
gaFeatureSet.name = self._name
gaFeatureSet.source_uri = self._sourceUri
attributes = self.getAttributes()
for key in attributes:
gaFeatureSet.attributes.attr[key] \
.values.extend(protocol.encodeValue(attributes[key]))
return gaFeatureSet
def getCompoundIdForFeatureId(self, featureId):
"""
Returns server-style compound ID for an internal featureId.
:param long featureId: id of feature in database
:return: string representing ID for the specified GA4GH protocol
Feature object in this FeatureSet.
"""
if featureId is not None and featureId != "":
compoundId = datamodel.FeatureCompoundId(
self.getCompoundId(), str(featureId))
else:
compoundId = ""
return str(compoundId)
def getFeature(self, compoundId):
"""
Fetches a simulated feature by ID.
:param compoundId: any non-null string
:return: A simulated feature with id set to the same value as the
passed-in compoundId.
:raises: exceptions.ObjectWithIdNotFoundException if None is passed
in for the compoundId.
"""
if compoundId is None:
raise exceptions.ObjectWithIdNotFoundException(compoundId)
randomNumberGenerator = random.Random()
randomNumberGenerator.seed(self._randomSeed)
feature = self._generateSimulatedFeature(randomNumberGenerator)
feature.id = str(compoundId)
feature.parent_id = "" # TODO: Test with nonempty parentIDs?
return feature
def getFeatures(self, referenceName=None, start=None, end=None,
startIndex=None, maxResults=None,
featureTypes=None, parentId=None,
name=None, geneSymbol=None, numFeatures=10):
"""
Returns a set number of simulated features.
:param referenceName: name of reference to "search" on
:param start: start coordinate of query
:param end: end coordinate of query
:param startIndex: None or int
:param maxResults: None or int
:param featureTypes: optional list of ontology terms to limit query
:param parentId: optional parentId to limit query.
:param name: the name of the feature
:param geneSymbol: the symbol for the gene the features are on
:param numFeatures: number of features to generate in the return.
10 is a reasonable (if arbitrary) default.
:return: Yields feature list
"""
randomNumberGenerator = random.Random()
randomNumberGenerator.seed(self._randomSeed)
for featureId in range(numFeatures):
gaFeature = self._generateSimulatedFeature(randomNumberGenerator)
gaFeature.id = self.getCompoundIdForFeatureId(featureId)
match = (
gaFeature.start < end and
gaFeature.end > start and
gaFeature.reference_name == referenceName and (
featureTypes is None or len(featureTypes) == 0 or
gaFeature.feature_type in featureTypes))
if match:
gaFeature.parent_id = "" # TODO: Test nonempty parentIDs?
yield gaFeature
def populateFromFile(self, dataUrl):
"""
Populates the instance variables of this FeatureSet from the specified
data URL.
"""
self._dbFilePath = dataUrl
self._db = Gff3DbBackend(self._dbFilePath)
def populateFromRow(self, featureSetRecord):
"""
Populates the instance variables of this FeatureSet from the specified
DB row.
"""
self._dbFilePath = featureSetRecord.dataurl
self.setAttributesJson(featureSetRecord.attributes)
self._db = Gff3DbBackend(self._dbFilePath)
def getFeature(self, compoundId):
"""
Returns a protocol.Feature object corresponding to a compoundId
:param compoundId: a datamodel.FeatureCompoundId object
:return: a Feature object.
:raises: exceptions.ObjectWithIdNotFoundException if invalid
compoundId is provided.
"""
featureId = long(compoundId.featureId)
with self._db as dataSource:
featureReturned = dataSource.getFeatureById(featureId)
if featureReturned is None:
raise exceptions.ObjectWithIdNotFoundException(compoundId)
else:
gaFeature = self._gaFeatureForFeatureDbRecord(featureReturned)
return gaFeature
def _gaFeatureForFeatureDbRecord(self, feature):
"""
:param feature: The DB Row representing a feature
:return: the corresponding GA4GH protocol.Feature object
"""
gaFeature = protocol.Feature()
gaFeature.id = self.getCompoundIdForFeatureId(feature['id'])
if feature.get('parent_id'):
gaFeature.parent_id = self.getCompoundIdForFeatureId(
feature['parent_id'])
else:
gaFeature.parent_id = ""
gaFeature.feature_set_id = self.getId()
gaFeature.reference_name = pb.string(feature.get('reference_name'))
gaFeature.start = pb.int(feature.get('start'))
gaFeature.end = pb.int(feature.get('end'))
gaFeature.name = pb.string(feature.get('name'))
if feature.get('strand', '') == '-':
gaFeature.strand = protocol.NEG_STRAND
else:
# default to positive strand
gaFeature.strand = protocol.POS_STRAND
gaFeature.child_ids.extend(map(
self.getCompoundIdForFeatureId,
json.loads(feature['child_ids'])))
gaFeature.feature_type.CopyFrom(
self._ontology.getGaTermByName(feature['type']))
attributes = json.loads(feature['attributes'])
# TODO: Identify which values are ExternalIdentifiers and OntologyTerms
for key in attributes:
for v in attributes[key]:
gaFeature.attributes.attr[key].values.add().string_value = v
if 'gene_name' in attributes and len(attributes['gene_name']) > 0:
gaFeature.gene_symbol = pb.string(attributes['gene_name'][0])
return gaFeature
def getFeatures(self, referenceName=None, start=None, end=None,
startIndex=None, maxResults=None,
featureTypes=None, parentId=None,
name=None, geneSymbol=None):
"""
method passed to runSearchRequest to fulfill the request
:param str referenceName: name of reference (ex: "chr1")
:param start: castable to int, start position on reference
:param end: castable to int, end position on reference
:param startIndex: none or castable to int
:param maxResults: none or castable to int
:param featureTypes: array of str
:param parentId: none or featureID of parent
:param name: the name of the feature
:param geneSymbol: the symbol for the gene the features are on
:return: yields a protocol.Feature at a time
"""
with self._db as dataSource:
features = dataSource.searchFeaturesInDb(
startIndex, maxResults,
referenceName=referenceName,
start=start, end=end,
parentId=parentId, featureTypes=featureTypes,
name=name, geneSymbol=geneSymbol)
for feature in features:
gaFeature = self._gaFeatureForFeatureDbRecord(feature)
yield gaFeature
def addRnaQuantification(self, rnaQuantification):
"""
Add an rnaQuantification to this rnaQuantificationSet
"""
id_ = rnaQuantification.getId()
self._rnaQuantificationIdMap[id_] = rnaQuantification
self._rnaQuantificationIds.append(id_)
def toProtocolElement(self):
"""
Converts this rnaQuant into its GA4GH protocol equivalent.
"""
protocolElement = protocol.RnaQuantificationSet()
protocolElement.id = self.getId()
protocolElement.dataset_id = self._parentContainer.getId()
protocolElement.name = self._name
self.serializeAttributes(protocolElement)
return protocolElement
def populateFromFile(self, dataUrl):
"""
Populates the instance variables of this RnaQuantificationSet from the
specified data URL.
"""
self._dbFilePath = dataUrl
self._db = SqliteRnaBackend(self._dbFilePath)
self.addRnaQuants()
def populateFromRow(self, quantificationSetRecord):
"""
Populates the instance variables of this RnaQuantificationSet from the
specified DB row.
"""
self._dbFilePath = quantificationSetRecord.dataurl
self.setAttributesJson(quantificationSetRecord.attributes)
self._db = SqliteRnaBackend(self._dbFilePath)
self.addRnaQuants()
def toProtocolElement(self):
"""
Converts this rnaQuant into its GA4GH protocol equivalent.
"""
protocolElement = protocol.RnaQuantification()
protocolElement.id = self.getId()
protocolElement.name = self._name
protocolElement.description = self._description
protocolElement.read_group_ids.extend(self._readGroupIds)
protocolElement.programs.extend(self._programs)
protocolElement.biosample_id = self._biosampleId
protocolElement.feature_set_ids.extend(self._featureSetIds)
protocolElement.rna_quantification_set_id = \
self._parentContainer.getId()
self.serializeAttributes(protocolElement)
return protocolElement
def addRnaQuantMetadata(self, fields):
"""
Data elements are:
Id, annotations, description, name, readGroupId,
where annotations is a comma-separated list.
"""
self._featureSetIds = fields["feature_set_ids"].split(',')
self._description = fields["description"]
self._name = fields["name"]
self._biosampleId = fields.get("biosample_id", "")
if fields["read_group_ids"] == "":
self._readGroupIds = []
else:
self._readGroupIds = fields["read_group_ids"].split(',')
if fields["programs"] == "":
self._programs = []
else:
# Need to use program Id's here to generate a list of Programs
# for now set to empty
self._programs = []
def getRnaQuantMetadata(self):
"""
Input is a tab-separated file with no header. Columns are:
Id, annotations, description, name, readGroupId,
where annotations is a comma-separated list.
"""
rnaQuantId = self.getLocalId()
with self._db as dataSource:
rnaQuantReturned = dataSource.getRnaQuantificationById(
rnaQuantId)
self.addRnaQuantMetadata(rnaQuantReturned)
def populateFromFile(self, dataUrl):
"""
Populates the instance variables of this FeatureSet from the specified
data URL.
"""
self._dbFilePath = dataUrl
self._db = SqliteRnaBackend(self._dbFilePath)
self.getRnaQuantMetadata()
def populateFromRow(self, row):
"""
Populates the instance variables of this FeatureSet from the specified
DB row.
"""
self._dbFilePath = row[b'dataUrl']
self._db = SqliteRnaBackend(self._dbFilePath)
self.getRnaQuantMetadata()
def getExpressionLevels(
self, threshold=0.0, names=[], startIndex=0, maxResults=0):
"""
Returns the list of ExpressionLevels in this RNA Quantification.
"""
rnaQuantificationId = self.getLocalId()
with self._db as dataSource:
expressionsReturned = dataSource.searchExpressionLevelsInDb(
rnaQuantificationId,
names=names,
threshold=threshold,
startIndex=startIndex,
maxResults=maxResults)
expressionLevels = [
SqliteExpressionLevel(self, expressionEntry) for
expressionEntry in expressionsReturned]
return expressionLevels
def searchRnaQuantificationsInDb(
self, rnaQuantificationId=""):
"""
:param rnaQuantificationId: string to restrict the search by id
:return: an array of dictionaries representing the returned data.
"""
sql = ("SELECT * FROM RnaQuantification")
sql_args = ()
if len(rnaQuantificationId) > 0:
sql += " WHERE id = ? "
sql_args += (rnaQuantificationId,)
query = self._dbconn.execute(sql, sql_args)
try:
return sqlite_backend.iterativeFetch(query)
except AttributeError:
raise exceptions.RnaQuantificationNotFoundException(
rnaQuantificationId)
def getRnaQuantificationById(self, rnaQuantificationId):
"""
:param rnaQuantificationId: the RNA Quantification ID
:return: dictionary representing an RnaQuantification object,
or None if no match is found.
"""
sql = ("SELECT * FROM RnaQuantification WHERE id = ?")
query = self._dbconn.execute(sql, (rnaQuantificationId,))
try:
return sqlite_backend.fetchOne(query)
except AttributeError:
raise exceptions.RnaQuantificationNotFoundException(
rnaQuantificationId)
def searchExpressionLevelsInDb(
self, rnaQuantId, names=[], threshold=0.0, startIndex=0,
maxResults=0):
"""
:param rnaQuantId: string to restrict the search by quantification id
:param threshold: float minimum expression value to return
:return: an array of dictionaries representing the returned data.
"""
sql = ("SELECT * FROM Expression WHERE "
"rna_quantification_id = ? "
"AND expression > ? ")
sql_args = (rnaQuantId, threshold)
if len(names) > 0:
sql += "AND name in ("
sql += ",".join(['?' for name in names])
sql += ") "
for name in names:
sql_args += (name,)
sql += sqlite_backend.limitsSql(
startIndex=startIndex, maxResults=maxResults)
query = self._dbconn.execute(sql, sql_args)
return sqlite_backend.iterativeFetch(query)
def getExpressionLevelById(self, expressionId):
"""
:param expressionId: the ExpressionLevel ID
:return: dictionary representing an ExpressionLevel object,
or None if no match is found.
"""
sql = ("SELECT * FROM Expression WHERE id = ?")
query = self._dbconn.execute(sql, (expressionId,))
try:
return sqlite_backend.fetchOne(query)
except AttributeError:
raise exceptions.ExpressionLevelNotFoundException(
expressionId)
def populateFromRow(self, callSetRecord):
"""
Populates this CallSet from the specified DB row.
"""
self._biosampleId = callSetRecord.biosampleid
self.setAttributesJson(callSetRecord.attributes)
def toProtocolElement(self):
"""
Returns the representation of this CallSet as the corresponding
ProtocolElement.
"""
variantSet = self.getParentContainer()
gaCallSet = protocol.CallSet(
biosample_id=self.getBiosampleId())
if variantSet.getCreationTime():
gaCallSet.created = variantSet.getCreationTime()
if variantSet.getUpdatedTime():
gaCallSet.updated = variantSet.getUpdatedTime()
gaCallSet.id = self.getId()
gaCallSet.name = self.getLocalId()
gaCallSet.variant_set_ids.append(variantSet.getId())
self.serializeAttributes(gaCallSet)
return gaCallSet
def addVariantAnnotationSet(self, variantAnnotationSet):
"""
Adds the specified variantAnnotationSet to this dataset.
"""
id_ = variantAnnotationSet.getId()
self._variantAnnotationSetIdMap[id_] = variantAnnotationSet
self._variantAnnotationSetIds.append(id_)
def getVariantAnnotationSet(self, id_):
"""
Returns the AnnotationSet in this dataset with the specified 'id'
"""
if id_ not in self._variantAnnotationSetIdMap:
raise exceptions.AnnotationSetNotFoundException(id_)
return self._variantAnnotationSetIdMap[id_]
def addCallSet(self, callSet):
"""
Adds the specified CallSet to this VariantSet.
"""
callSetId = callSet.getId()
self._callSetIdMap[callSetId] = callSet
self._callSetNameMap[callSet.getLocalId()] = callSet
self._callSetIds.append(callSetId)
self._callSetIdToIndex[callSet.getId()] = len(self._callSetIds) - 1
def addCallSetFromName(self, sampleName):
"""
Adds a CallSet for the specified sample name.
"""
callSet = CallSet(self, sampleName)
self.addCallSet(callSet)
def getCallSetByName(self, name):
"""
Returns a CallSet with the specified name, or raises a
CallSetNameNotFoundException if it does not exist.
"""
if name not in self._callSetNameMap:
raise exceptions.CallSetNameNotFoundException(name)
return self._callSetNameMap[name]
def getCallSet(self, id_):
"""
Returns a CallSet with the specified id, or raises a
CallSetNotFoundException if it does not exist.
"""
if id_ not in self._callSetIdMap:
raise exceptions.CallSetNotFoundException(id_)
return self._callSetIdMap[id_]
def toProtocolElement(self):
"""
Converts this VariantSet into its GA4GH protocol equivalent.
"""
protocolElement = protocol.VariantSet()
protocolElement.id = self.getId()
protocolElement.dataset_id = self.getParentContainer().getId()
protocolElement.reference_set_id = self._referenceSet.getId()
protocolElement.metadata.extend(self.getMetadata())
protocolElement.name = self.getLocalId()
self.serializeAttributes(protocolElement)
return protocolElement
def _createGaVariant(self):
"""
Convenience method to set the common fields in a GA Variant
object from this variant set.
"""
ret = protocol.Variant()
if self._creationTime:
ret.created = self._creationTime
if self._updatedTime:
ret.updated = self._updatedTime
ret.variant_set_id = self.getId()
return ret
def getVariantId(self, gaVariant):
"""
Returns an ID string suitable for the specified GA Variant
object in this variant set.
"""
md5 = self.hashVariant(gaVariant)
compoundId = datamodel.VariantCompoundId(
self.getCompoundId(), gaVariant.reference_name,
str(gaVariant.start), md5)
return str(compoundId)
def getCallSetId(self, sampleName):
"""
Returns the callSetId for the specified sampleName in this
VariantSet.
"""
compoundId = datamodel.CallSetCompoundId(
self.getCompoundId(), sampleName)
return str(compoundId)
def hashVariant(cls, gaVariant):
"""
Produces an MD5 hash of the ga variant object to distinguish
it from other variants at the same genomic coordinate.
"""
hash_str = gaVariant.reference_bases + \
str(tuple(gaVariant.alternate_bases))
return hashlib.md5(hash_str).hexdigest()
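# What feeds the MD5 above, in isolation (Python 2 str semantics, matching
# the surrounding code; on Python 3 the string would need .encode() first):
import hashlib
_hash_str = "A" + str(("G",))          # ref "A", alternates ("G",) -> "A('G',)"
_digest = hashlib.md5(_hash_str).hexdigest()  # stable per ref/alt combination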
def generateVariant(self, referenceName, position, randomNumberGenerator):
"""
Generate a random variant for the specified position using the
specified random number generator. This generator should be seeded
with a value that is unique to this position so that the same variant
will always be produced regardless of the order it is generated in.
"""
variant = self._createGaVariant()
variant.reference_name = referenceName
variant.start = position
variant.end = position + 1 # SNPs only for now
bases = ["A", "C", "G", "T"]
ref = randomNumberGenerator.choice(bases)
variant.reference_bases = ref
alt = randomNumberGenerator.choice(
[base for base in bases if base != ref])
variant.alternate_bases.append(alt)
randChoice = randomNumberGenerator.randint(0, 2)
if randChoice == 0:
variant.filters_applied = False
elif randChoice == 1:
variant.filters_applied = True
variant.filters_passed = True
else:
variant.filters_applied = True
variant.filters_passed = False
variant.filters_failed.append('q10')
for callSet in self.getCallSets():
call = variant.calls.add()
call.call_set_id = callSet.getId()
# for now, the genotype is either [0,1], [1,1] or [1,0] with equal
# probability; probably will want to do something more
# sophisticated later.
randomChoice = randomNumberGenerator.choice(
[[0, 1], [1, 0], [1, 1]])
call.genotype.extend(randomChoice)
# TODO What is a reasonable model for generating these likelihoods?
# Are these log-scaled? Spec does not say.
call.genotype_likelihood.extend([-100, -100, -100])
variant.id = self.getVariantId(variant)
return variant
def populateFromRow(self, variantSetRecord):
"""
Populates this VariantSet from the specified DB row.
"""
self._created = variantSetRecord.created
self._updated = variantSetRecord.updated
self.setAttributesJson(variantSetRecord.attributes)
self._chromFileMap = {}
# We can't load directly as we want tuples to be stored
# rather than lists.
for key, value in json.loads(variantSetRecord.dataurlindexmap).items():
self._chromFileMap[key] = tuple(value)
self._metadata = []
for jsonDict in json.loads(variantSetRecord.metadata):
metadata = protocol.fromJson(json.dumps(jsonDict),
protocol.VariantSetMetadata)
self._metadata.append(metadata)
def populateFromFile(self, dataUrls, indexFiles):
"""
Populates this variant set using the specified lists of data
files and indexes. These must be in the same order, such that
the jth index file corresponds to the jth data file.
"""
assert len(dataUrls) == len(indexFiles)
for dataUrl, indexFile in zip(dataUrls, indexFiles):
varFile = pysam.VariantFile(dataUrl, index_filename=indexFile)
try:
self._populateFromVariantFile(varFile, dataUrl, indexFile)
finally:
varFile.close()
def populateFromDirectory(self, vcfDirectory):
"""
Populates this VariantSet by examining all the VCF files in the
specified directory. This is mainly used as a convenience for
testing purposes.
"""
pattern = os.path.join(vcfDirectory, "*.vcf.gz")
dataFiles = []
indexFiles = []
for vcfFile in glob.glob(pattern):
dataFiles.append(vcfFile)
indexFiles.append(vcfFile + ".tbi")
self.populateFromFile(dataFiles, indexFiles)
def checkConsistency(self):
"""
Perform consistency check on the variant set
"""
for referenceName, (dataUrl, indexFile) in self._chromFileMap.items():
varFile = pysam.VariantFile(dataUrl, index_filename=indexFile)
try:
for chrom in varFile.index:
chrom, _, _ = self.sanitizeVariantFileFetch(chrom)
if not isEmptyIter(varFile.fetch(chrom)):
self._checkMetadata(varFile)
self._checkCallSetIds(varFile)
finally:
varFile.close()
def _populateFromVariantFile(self, varFile, dataUrl, indexFile):
"""
Populates the instance variables of this VariantSet from the specified
pysam VariantFile object.
"""
if varFile.index is None:
raise exceptions.NotIndexedException(dataUrl)
for chrom in varFile.index:
# Unlike Tabix indices, CSI indices include all contigs defined
# in the BCF header. Thus we must test each one to see if
# records exist or else they are likely to trigger spurious
# overlapping errors.
chrom, _, _ = self.sanitizeVariantFileFetch(chrom)
if not isEmptyIter(varFile.fetch(chrom)):
if chrom in self._chromFileMap:
raise exceptions.OverlappingVcfException(dataUrl, chrom)
self._chromFileMap[chrom] = dataUrl, indexFile
self._updateMetadata(varFile)
self._updateCallSetIds(varFile)
self._updateVariantAnnotationSets(varFile, dataUrl)
def _updateVariantAnnotationSets(self, variantFile, dataUrl):
"""
Updates the variant annotation set associated with this variant using
information in the specified pysam variantFile.
"""
# TODO check the consistency of this between VCF files.
if not self.isAnnotated():
annotationType = None
for record in variantFile.header.records:
if record.type == "GENERIC":
if record.key == "SnpEffVersion":
annotationType = ANNOTATIONS_SNPEFF
elif record.key == "VEP":
version = record.value.split()[0]
# TODO we need _much_ more sophisticated processing
# of VEP versions here. When do they become
# incompatible?
if version == "v82":
annotationType = ANNOTATIONS_VEP_V82
elif version == "v77":
annotationType = ANNOTATIONS_VEP_V77
else:
# TODO raise a proper typed exception there with
# the file name as an argument.
raise ValueError(
"Unsupported VEP version {} in '{}'".format(
version, dataUrl))
if annotationType is None:
infoKeys = variantFile.header.info.keys()
if 'CSQ' in infoKeys or 'ANN' in infoKeys:
# TODO likewise, we want a properly typed exception that
# we can throw back to the repo manager UI and display
# as an import error.
raise ValueError(
"Unsupported annotations in '{}'".format(dataUrl))
if annotationType is not None:
vas = HtslibVariantAnnotationSet(self, self.getLocalId())
vas.populateFromFile(variantFile, annotationType)
self.addVariantAnnotationSet(vas)
def _updateMetadata(self, variantFile):
"""
Updates the metadata for this variant set based on the specified
variant file.
"""
metadata = self._getMetadataFromVcf(variantFile)
if self._metadata is None:
self._metadata = metadata
def _checkMetadata(self, variantFile):
"""
Checks that metadata is consistent
"""
metadata = self._getMetadataFromVcf(variantFile)
if self._metadata is not None and self._metadata != metadata:
raise exceptions.InconsistentMetaDataException(
variantFile.filename)
def _checkCallSetIds(self, variantFile):
"""
Checks callSetIds for consistency
"""
if len(self._callSetIdMap) > 0:
callSetIds = set([
self.getCallSetId(sample)
for sample in variantFile.header.samples])
if callSetIds != set(self._callSetIdMap.keys()):
raise exceptions.InconsistentCallSetIdException(
variantFile.filename)
def _updateCallSetIds(self, variantFile):
"""
Updates the call set IDs based on the specified variant file.
"""
if len(self._callSetIdMap) == 0:
for sample in variantFile.header.samples:
self.addCallSetFromName(sample)
def convertVariant(self, record, callSetIds):
"""
Converts the specified pysam variant record into a GA4GH Variant
object. Only calls for the specified list of callSetIds will
be included.
"""
variant = self._createGaVariant()
variant.reference_name = record.contig
if record.id is not None:
variant.names.extend(record.id.split(';'))
variant.start = record.start # 0-based inclusive
variant.end = record.stop # 0-based exclusive
variant.reference_bases = record.ref
if record.alts is not None:
variant.alternate_bases.extend(list(record.alts))
filterKeys = record.filter.keys()
if len(filterKeys) == 0:
variant.filters_applied = False
else:
variant.filters_applied = True
if len(filterKeys) == 1 and filterKeys[0] == 'PASS':
variant.filters_passed = True
else:
variant.filters_passed = False
variant.filters_failed.extend(filterKeys)
# record.qual is also available, when supported by GAVariant.
for key, value in record.info.iteritems():
if value is None:
continue
if key == 'SVTYPE':
variant.variant_type = value
elif key == 'SVLEN':
variant.svlen = int(value[0])
elif key == 'CIPOS':
variant.cipos.extend(value)
elif key == 'CIEND':
variant.ciend.extend(value)
elif isinstance(value, str):
value = value.split(',')
protocol.setAttribute(
variant.attributes.attr[key].values, value)
for callSetId in callSetIds:
callSet = self.getCallSet(callSetId)
pysamCall = record.samples[str(callSet.getSampleName())]
variant.calls.add().CopyFrom(
self._convertGaCall(callSet, pysamCall))
variant.id = self.getVariantId(variant)
return variant
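# How VCF FILTER keys map onto the GA4GH filter fields above (a pure-Python
# sketch of the branch logic in convertVariant):
_filterKeys = ['q10', 's50']                # e.g. record.filter.keys()
_filters_applied = len(_filterKeys) > 0     # True: some filter was applied
_filters_passed = _filterKeys == ['PASS']   # True only for a lone PASS entry
_filters_failed = [] if _filters_passed else _filterKeys  # -> ['q10', 's50']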
def getPysamVariants(self, referenceName, startPosition, endPosition):
"""
Returns an iterator over the pysam VCF records corresponding to the
specified query.
"""
if referenceName in self._chromFileMap:
varFileName = self._chromFileMap[referenceName]
referenceName, startPosition, endPosition = \
self.sanitizeVariantFileFetch(
referenceName, startPosition, endPosition)
cursor = self.getFileHandle(varFileName).fetch(
referenceName, startPosition, endPosition)
for record in cursor:
yield record
def getVariants(self, referenceName, startPosition, endPosition,
callSetIds=[]):
"""
Returns an iterator over the specified variants. The parameters
correspond to the attributes of a GASearchVariantsRequest object.
"""
if callSetIds is None:
callSetIds = self._callSetIds
else:
for callSetId in callSetIds:
if callSetId not in self._callSetIds:
raise exceptions.CallSetNotInVariantSetException(
callSetId, self.getId())
for record in self.getPysamVariants(
referenceName, startPosition, endPosition):
yield self.convertVariant(record, callSetIds)
def getMetadataId(self, metadata):
"""
Returns the id of the given metadata object.
"""
return str(datamodel.VariantSetMetadataCompoundId(
self.getCompoundId(), 'metadata:' + metadata.key))
def _createGaVariantAnnotation(self):
"""
Convenience method to set the common fields in a GA VariantAnnotation
object from this variant set.
"""
ret = protocol.VariantAnnotation()
ret.created = self._creationTime
ret.variant_annotation_set_id = self.getId()
return ret
def toProtocolElement(self):
"""
Converts this VariantAnnotationSet into its GA4GH protocol equivalent.
"""
protocolElement = protocol.VariantAnnotationSet()
protocolElement.id = self.getId()
protocolElement.variant_set_id = self._variantSet.getId()
protocolElement.name = self.getLocalId()
protocolElement.analysis.CopyFrom(self.getAnalysis())
self.serializeAttributes(protocolElement)
return protocolElement
def hashVariantAnnotation(cls, gaVariant, gaVariantAnnotation):
"""
Produces an MD5 hash of the gaVariant and gaVariantAnnotation objects
"""
treffs = [treff.id for treff in gaVariantAnnotation.transcript_effects]
return hashlib.md5(
"{}\t{}\t{}\t".format(
gaVariant.reference_bases, tuple(gaVariant.alternate_bases),
treffs)
).hexdigest()
def getVariantAnnotationId(self, gaVariant, gaAnnotation):
"""
Produces a stringified compoundId representing a variant
annotation.
:param gaVariant: protocol.Variant
:param gaAnnotation: protocol.VariantAnnotation
:return: compoundId String
"""
md5 = self.hashVariantAnnotation(gaVariant, gaAnnotation)
compoundId = datamodel.VariantAnnotationCompoundId(
self.getCompoundId(), gaVariant.reference_name,
str(gaVariant.start), md5)
return str(compoundId)
def generateVariantAnnotation(self, variant):
"""
Generate a random variant annotation based on a given variant.
This generator should be seeded with a value that is unique to the
variant so that the same annotation will always be produced regardless
of the order it is generated in.
"""
# To make this reproducible, make a seed based on this
# specific variant.
seed = self._randomSeed + variant.start + variant.end
randomNumberGenerator = random.Random()
randomNumberGenerator.seed(seed)
ann = protocol.VariantAnnotation()
ann.variant_annotation_set_id = str(self.getCompoundId())
ann.variant_id = variant.id
ann.created = datetime.datetime.now().isoformat() + "Z"
# make a transcript effect for each alternate base element
for base in variant.alternate_bases:
ann.transcript_effects.add().CopyFrom(
self.generateTranscriptEffect(
variant, ann, base, randomNumberGenerator))
ann.id = self.getVariantAnnotationId(variant, ann)
return ann
def populateFromRow(self, annotationSetRecord):
"""
Populates this VariantAnnotationSet from the specified DB row.
"""
self._annotationType = annotationSetRecord.annotationtype
self._analysis = protocol.fromJson(
annotationSetRecord.analysis, protocol.Analysis)
self._creationTime = annotationSetRecord.created
self._updatedTime = annotationSetRecord.updated
self.setAttributesJson(annotationSetRecord.attributes)
def _getAnnotationAnalysis(self, varFile):
"""
Assembles metadata within the VCF header into a GA4GH Analysis object.
:return: protocol.Analysis
"""
header = varFile.header
analysis = protocol.Analysis()
formats = header.formats.items()
infos = header.info.items()
filters = header.filters.items()
for prefix, content in [("FORMAT", formats), ("INFO", infos),
("FILTER", filters)]:
for contentKey, value in content:
key = "{0}.{1}".format(prefix, value.name)
if key not in analysis.attributes.attr:
analysis.attributes.attr[key].Clear()
if value.description is not None:
analysis.attributes.attr[
key].values.add().string_value = value.description
analysis.created = self._creationTime
analysis.updated = self._updatedTime
for r in header.records:
# Don't add a key to info if there's nothing in the value
if r.value is not None:
if r.key not in analysis.attributes.attr:
analysis.attributes.attr[r.key].Clear()
analysis.attributes.attr[r.key] \
.values.add().string_value = str(r.value)
if r.key == "created" or r.key == "fileDate":
# TODO handle more date formats
try:
if '-' in r.value:
fmtStr = "%Y-%m-%d"
else:
fmtStr = "%Y%m%d"
analysis.created = datetime.datetime.strptime(
r.value, fmtStr).isoformat() + "Z"
except ValueError:
# is there a logger we should tell?
# print("INFO: Could not parse variant annotation time")
pass # analysis.create_date_time remains datetime.now()
if r.key == "software":
analysis.software.append(r.value)
if r.key == "name":
analysis.name = r.value
if r.key == "description":
analysis.description = r.value
analysis.id = str(datamodel.VariantAnnotationSetAnalysisCompoundId(
self._compoundId, "analysis"))
return analysis
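# The two fileDate formats handled above, in isolation:
import datetime
assert (datetime.datetime.strptime("2015-07-23", "%Y-%m-%d").isoformat() + "Z" ==
        datetime.datetime.strptime("20150723", "%Y%m%d").isoformat() + "Z" ==
        "2015-07-23T00:00:00Z")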
def getVariantAnnotations(self, referenceName, startPosition, endPosition):
"""
Generator for iterating through variant annotations in this
variant annotation set.
:param referenceName:
:param startPosition:
:param endPosition:
:return: generator of protocol.VariantAnnotation
"""
variantIter = self._variantSet.getPysamVariants(
referenceName, startPosition, endPosition)
for record in variantIter:
yield self.convertVariantAnnotation(record)
def convertLocation(self, pos):
"""
Accepts a position string (start/length) and returns
a GA4GH AlleleLocation with populated fields.
:param pos:
:return: protocol.AlleleLocation
"""
if isUnspecified(pos):
return None
coordLen = pos.split('/')
if len(coordLen) > 1:
allLoc = self._createGaAlleleLocation()
allLoc.start = int(coordLen[0]) - 1
return allLoc
return None
def convertLocationHgvsC(self, hgvsc):
"""
Accepts an annotation in HGVS notation and returns
an AlleleLocation with populated fields.
:param hgvsc:
:return:
"""
if isUnspecified(hgvsc):
return None
match = re.match(".*c.(\d+)(\D+)>(\D+)", hgvsc)
if match:
pos = int(match.group(1))
if pos > 0:
allLoc = self._createGaAlleleLocation()
allLoc.start = pos - 1
allLoc.reference_sequence = match.group(2)
allLoc.alternate_sequence = match.group(3)
return allLoc
return None
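# The c. pattern above applied to a concrete HGVS string (standalone check):
import re
_m = re.match(r".*c.(\d+)(\D+)>(\D+)", "ENST00000366667.4:c.803T>C")
assert _m.groups() == ("803", "T", "C")   # stored start: 803 - 1 = 802 (0-based)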
def convertLocationHgvsP(self, hgvsp):
"""
Accepts an annotation in HGVS notation and returns
an AlleleLocation with populated fields.
:param hgvsp:
:return: protocol.AlleleLocation
"""
if isUnspecified(hgvsp):
return None
match = re.match(".*p.(\D+)(\d+)(\D+)", hgvsp, flags=re.UNICODE)
if match is not None:
allLoc = self._createGaAlleleLocation()
allLoc.reference_sequence = match.group(1)
allLoc.start = int(match.group(2)) - 1
allLoc.alternate_sequence = match.group(3)
return allLoc
return None |
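A companion sketch for the protein-level pattern; an input like "p.Ala26Thr" yields three-letter residue codes around a 0-based position (parseHgvsP is hypothetical):

import re

def parseHgvsP(hgvsp):
    match = re.match(r".*p\.(\D+)(\d+)(\D+)", hgvsp, flags=re.UNICODE)
    if match is not None:
        return (match.group(1),           # reference residue
                int(match.group(2)) - 1,  # 0-based start
                match.group(3))           # alternate residue
    return None

assert parseHgvsP("ENSP00000379873.1:p.Ala26Thr") == ("Ala", 25, "Thr")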
Adds locations to a GA4GH transcript effect object
by parsing HGVS annotation fields in concert with
the supplied position values.
:param effect: protocol.TranscriptEffect
:param protPos: String representing protein position from VCF
:param cdnaPos: String representing coding DNA location
:return: effect protocol.TranscriptEffect
def addLocations(self, effect, protPos, cdnaPos):
"""
Adds locations to a GA4GH transcript effect object
by parsing HGVS annotation fields in concert with
the supplied position values.
:param effect: protocol.TranscriptEffect
:param protPos: String representing protein position from VCF
:param cdnaPos: String representing coding DNA location
:return: effect protocol.TranscriptEffect
"""
self.addCDSLocation(effect, cdnaPos)
self.addCDNALocation(effect, cdnaPos)
self.addProteinLocation(effect, protPos)
return effect |
Takes the ANN string of a SnpEff generated VCF, splits it
and returns a populated GA4GH transcript effect object.
:param annStr: String
:param hgvsG: String
:return: effect protocol.TranscriptEffect()
def convertTranscriptEffect(self, annStr, hgvsG):
"""
Takes the ANN string of a SnpEff generated VCF, splits it
and returns a populated GA4GH transcript effect object.
:param annStr: String
:param hgvsG: String
:return: effect protocol.TranscriptEffect()
"""
effect = self._createGaTranscriptEffect()
effect.hgvs_annotation.CopyFrom(protocol.HGVSAnnotation())
annDict = dict()
if self._annotationType == ANNOTATIONS_SNPEFF:
annDict = dict(zip(self.SNPEFF_FIELDS, annStr.split("|")))
elif self._annotationType == ANNOTATIONS_VEP_V82:
annDict = dict(zip(self.VEP_FIELDS, annStr.split("|")))
else:
annDict = dict(zip(self.CSQ_FIELDS, annStr.split("|")))
annDict["hgvs_annotation.genomic"] = hgvsG if hgvsG else u''
for key, val in annDict.items():
try:
protocol.deepSetAttr(effect, key, val)
except AttributeError:
if val and key not in self.EXCLUDED_FIELDS:
protocol.setAttribute(
effect.attributes.attr[key].values, val)
effect.effects.extend(self.convertSeqOntology(annDict.get('effects')))
self.addLocations(
effect, annDict.get('protPos'), annDict.get('cdnaPos'))
effect.id = self.getTranscriptEffectId(effect)
return effect |
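The core of the field mapping is zipping a pipe-delimited annotation string against an ordered field-name tuple. A sketch with an illustrative, shortened field list (the real SNPEFF_FIELDS is longer):

SNPEFF_FIELDS_DEMO = ("alt", "effects", "impact", "geneName")

ann = "T|missense_variant|MODERATE|BRCA1"
annDict = dict(zip(SNPEFF_FIELDS_DEMO, ann.split("|")))
assert annDict["effects"] == "missense_variant"
assert annDict["geneName"] == "BRCA1"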
Splits a string of sequence ontology effects and creates
an ontology term record for each, collecting them
into an array of soTerms to return.
:param seqOntStr:
:return: [protocol.OntologyTerm]
def convertSeqOntology(self, seqOntStr):
"""
Splits a string of sequence ontology effects and creates
an ontology term record for each, collecting them
into an array of soTerms to return.
:param seqOntStr:
:return: [protocol.OntologyTerm]
"""
return [
self._ontology.getGaTermByName(soName)
for soName in seqOntStr.split('&')] |
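A toy version of the split, with the ontology lookup stubbed out as a dict literal rather than the real getGaTermByName:

soNames = "missense_variant&splice_region_variant".split('&')
terms = [{"term": name} for name in soNames]
assert [t["term"] for t in terms] == [
    "missense_variant", "splice_region_variant"]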
Converts the specified pysam variant record into a GA4GH variant
annotation object using the specified function to convert the
transcripts.
def convertVariantAnnotation(self, record):
"""
Converts the specified pysam variant record into a GA4GH variant
annotation object using the specified function to convert the
transcripts.
"""
variant = self._variantSet.convertVariant(record, [])
annotation = self._createGaVariantAnnotation()
annotation.variant_id = variant.id
gDots = record.info.get(b'HGVS.g')
# Convert annotations from INFO field into TranscriptEffect
transcriptEffects = []
annotations = record.info.get(b'ANN') or record.info.get(b'CSQ')
for i, ann in enumerate(annotations):
hgvsG = gDots[i % len(variant.alternate_bases)] if gDots else None
transcriptEffects.append(self.convertTranscriptEffect(ann, hgvsG))
annotation.transcript_effects.extend(transcriptEffects)
annotation.id = self.getVariantAnnotationId(variant, annotation)
return variant, annotation |
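The modulo indexing pairs each ANN entry with an HGVS.g string, assuming (as the arithmetic implies) that annotations cycle through the alternate alleles, transcript by transcript. A sketch of that pairing with made-up values:

gDots = ("g.100A>T", "g.100A>C")  # one entry per alternate base
alternate_bases = ("T", "C")
anns = ["annT_tx1", "annC_tx1", "annT_tx2", "annC_tx2"]
paired = [(ann, gDots[i % len(alternate_bases)])
          for i, ann in enumerate(anns)]
assert paired[2] == ("annT_tx2", "g.100A>T")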
Return name=value for a single attribute
def _attributeStr(self, name):
"""
Return name=value for a single attribute
"""
return "{}={}".format(
_encodeAttr(name),
",".join([_encodeAttr(v) for v in self.attributes[name]])) |
Return name=value, semi-colon-separated string for attributes,
including url-style quoting
def _attributeStrs(self):
"""
Return name=value, semi-colon-separated string for attributes,
including url-style quoting
"""
return ";".join([self._attributeStr(name)
for name in self.attributes.iterkeys()]) |
ID attribute from GFF3 or None if record doesn't have it.
Called "Name" rather than "Id" within GA4GH, as there is
no guarantee of either uniqueness or existence.
def featureName(self):
"""
ID attribute from GFF3 or None if record doesn't have it.
Called "Name" rather than "Id" within GA4GH, as there is
no guarantee of either uniqueness or existence.
"""
featId = self.attributes.get("ID")
if featId is not None:
featId = featId[0]
return featId |
Link a feature with its parents.
def _linkFeature(self, feature):
"""
Link a feature with its parents.
"""
parentNames = feature.attributes.get("Parent")
if parentNames is None:
self.roots.add(feature)
else:
for parentName in parentNames:
self._linkToParent(feature, parentName) |
Link a feature to the named parent feature.
def _linkToParent(self, feature, parentName):
"""
Link a feature to the named parent feature.
"""
parentParts = self.byFeatureName.get(parentName)
if parentParts is None:
raise GFF3Exception(
"Parent feature does not exist: {}".format(parentName),
self.fileName)
# the parent may be disjoint (split across multiple records)
for parentPart in parentParts:
feature.parents.add(parentPart)
parentPart.children.add(feature) |
finish loading the set, constructing the tree
def linkChildFeaturesToParents(self):
"""
finish loading the set, constructing the tree
"""
# features may be disjoint (split across multiple records)
for featureParts in self.byFeatureName.itervalues():
for feature in featureParts:
self._linkFeature(feature) |
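A toy walkthrough of the linking pass above, using a minimal stand-in Feature class instead of the real GFF3 records; features without a Parent attribute become roots, the rest are wired to every record of each named parent:

class Feat(object):
    def __init__(self, name, parents=None):
        self.name = name
        self.attributes = {"Parent": parents} if parents else {}
        self.parents, self.children = set(), set()

byFeatureName = {"gene1": [Feat("gene1")],
                 "mRNA1": [Feat("mRNA1", ["gene1"])]}
roots = set()
for featureParts in byFeatureName.values():
    for feature in featureParts:
        parentNames = feature.attributes.get("Parent")
        if parentNames is None:
            roots.add(feature)
        else:
            for parentName in parentNames:
                for parentPart in byFeatureName[parentName]:
                    feature.parents.add(parentPart)
                    parentPart.children.add(feature)

assert set(f.name for f in roots) == set(["gene1"])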
Sort order for Features, by genomic coordinate,
disambiguated by feature type (alphabetically).
def _recSortKey(r):
"""
Sort order for Features, by genomic coordinate,
disambiguated by feature type (alphabetically).
"""
return r.seqname, r.start, -r.end, r.type |
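The negated end is why this works: at equal seqname and start, the longest feature sorts first, so an enclosing feature (e.g. a gene) precedes the features it contains:

rows = [("chr1", 100, -200, "exon"), ("chr1", 100, -500, "gene")]
assert sorted(rows)[0][3] == "gene"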
Writes a single record to a file provided by the filehandle fh.
def _writeRec(self, fh, rec):
"""
Writes a single record to a file provided by the filehandle fh.
"""
fh.write(str(rec) + "\n")
for child in sorted(rec.children, key=self._recSortKey):
self._writeRec(fh, child) |
Write set to a GFF3 format file.
:param file fh: file handle for file to write to
def write(self, fh):
"""
Write set to a GFF3 format file.
:param file fh: file handle for file to write to
"""
fh.write(GFF3_HEADER + "\n")
for root in sorted(self.roots, key=self._recSortKey):
self._writeRec(fh, root) |
open input file, optionally with decompression
def _open(self):
"""
open input file, optionally with decompression
"""
if self.fileName.endswith(".gz"):
return gzip.open(self.fileName)
elif self.fileName.endswith(".bz2"):
return bz2.BZ2File(self.fileName)
else:
return open(self.fileName) |
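The same extension dispatch written as a lookup table; a stylistic alternative sketch, not the module's actual implementation:

import bz2
import gzip

OPENERS = {".gz": gzip.open, ".bz2": bz2.BZ2File}

def openMaybeCompressed(fileName):
    for ext, opener in OPENERS.items():
        if fileName.endswith(ext):
            return opener(fileName)
    return open(fileName)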
Returns a tuple of (attr, values), where values is a list,
to handle multi-valued attributes.
def _parseAttrVal(self, attrStr):
"""
Returns a tuple of (attr, values), where values is a list,
to handle multi-valued attributes.
"""
m = self.SPLIT_ATTR_RE.match(attrStr)
if m is None:
raise GFF3Exception(
"can't parse attribute/value: '" + attrStr +
"'", self.fileName, self.lineNumber)
name = urllib.unquote(m.group(1))
val = m.group(2)
# Split by comma to separate then unquote.
# Commas in values must be url encoded.
return name, [urllib.unquote(v) for v in val.split(',')] |
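A standalone sketch of the attribute parsing; the real SPLIT_ATTR_RE is not shown in this excerpt, so something like r"^([^=]+)=(.*)$" is assumed, and Python 2's urllib.unquote is used as in the surrounding code:

import re
import urllib

SPLIT_ATTR_RE = re.compile(r"^([^=]+)=(.*)$")

def parseAttrVal(attrStr):
    m = SPLIT_ATTR_RE.match(attrStr)
    if m is None:
        raise ValueError("can't parse attribute/value: " + attrStr)
    name = urllib.unquote(m.group(1))
    return name, [urllib.unquote(v) for v in m.group(2).split(',')]

assert parseAttrVal("Parent=mRNA1,mRNA2") == ("Parent", ["mRNA1", "mRNA2"])
assert parseAttrVal("Note=a%2Cb") == ("Note", ["a,b"])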