| code | code_sememe | token_type | code_dependency |
|---|---|---|---|
def _unkown_type(self, uridecodebin, decodebin, caps):
"""The callback for decodebin's "unknown-type" signal.
"""
# This is called *before* the stream becomes ready when the
# file can't be read.
streaminfo = caps.to_string()
if not streaminfo.startswith('audio/'):
# Ignore non-audio (e.g., video) decode errors.
return
self.read_exc = UnknownTypeError(streaminfo)
self.ready_sem.release()
|
def function[_unkown_type, parameter[self, uridecodebin, decodebin, caps]]:
constant[The callback for decodebin's "unknown-type" signal.
]
variable[streaminfo] assign[=] call[name[caps].to_string, parameter[]]
if <ast.UnaryOp object at 0x7da20c6a9840> begin[:]
return[None]
name[self].read_exc assign[=] call[name[UnknownTypeError], parameter[name[streaminfo]]]
call[name[self].ready_sem.release, parameter[]]
|
keyword[def] identifier[_unkown_type] ( identifier[self] , identifier[uridecodebin] , identifier[decodebin] , identifier[caps] ):
literal[string]
identifier[streaminfo] = identifier[caps] . identifier[to_string] ()
keyword[if] keyword[not] identifier[streaminfo] . identifier[startswith] ( literal[string] ):
keyword[return]
identifier[self] . identifier[read_exc] = identifier[UnknownTypeError] ( identifier[streaminfo] )
identifier[self] . identifier[ready_sem] . identifier[release] ()
|
def _unkown_type(self, uridecodebin, decodebin, caps):
"""The callback for decodebin's "unknown-type" signal.
"""
# This is called *before* the stream becomes ready when the
# file can't be read.
streaminfo = caps.to_string()
if not streaminfo.startswith('audio/'):
# Ignore non-audio (e.g., video) decode errors.
return # depends on [control=['if'], data=[]]
self.read_exc = UnknownTypeError(streaminfo)
self.ready_sem.release()
|
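The row above is built around a callback-plus-semaphore handshake: the GStreamer callback records an error, then releases a semaphore that a reader thread is blocked on. A minimal self-contained sketch of that handshake, with plain threading and a ValueError standing in for UnknownTypeError:

import threading

class Reader(object):
    def __init__(self):
        self.read_exc = None
        self.ready_sem = threading.Semaphore(0)

    def on_unknown_type(self, streaminfo):
        # Mirrors the callback above: ignore non-audio caps, otherwise
        # record the error and wake whoever is blocked on ready_sem.
        if not streaminfo.startswith('audio/'):
            return
        self.read_exc = ValueError(streaminfo)
        self.ready_sem.release()

reader = Reader()
threading.Thread(target=reader.on_unknown_type, args=('audio/x-oddball',)).start()
reader.ready_sem.acquire()  # blocks until the "decoder" reports the type
print(reader.read_exc)      # -> audio/x-oddball
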
def _table_line(args):
"""Implements the BigQuery table magic used to display tables.
The supported syntax is:
%bigquery table -t|--table <name> <other args>
Args:
args: the arguments following '%bigquery table'.
Returns:
The HTML rendering for the table.
"""
# TODO(gram): It would be good to turn _table_viewer into a class that has a registered
# renderer. That would allow this to return a table viewer object which is easier to test.
name = args['table']
table = _get_table(name)
if table and table.exists():
fields = args['cols'].split(',') if args['cols'] else None
html = _table_viewer(table, rows_per_page=args['rows'], fields=fields)
return IPython.core.display.HTML(html)
else:
raise Exception('Table %s does not exist; cannot display' % name)
|
def function[_table_line, parameter[args]]:
constant[Implements the BigQuery table magic used to display tables.
The supported syntax is:
%bigquery table -t|--table <name> <other args>
Args:
args: the arguments following '%bigquery table'.
Returns:
The HTML rendering for the table.
]
variable[name] assign[=] call[name[args]][constant[table]]
variable[table] assign[=] call[name[_get_table], parameter[name[name]]]
if <ast.BoolOp object at 0x7da18f00f490> begin[:]
variable[fields] assign[=] <ast.IfExp object at 0x7da18f00c340>
variable[html] assign[=] call[name[_table_viewer], parameter[name[table]]]
return[call[name[IPython].core.display.HTML, parameter[name[html]]]]
|
keyword[def] identifier[_table_line] ( identifier[args] ):
literal[string]
identifier[name] = identifier[args] [ literal[string] ]
identifier[table] = identifier[_get_table] ( identifier[name] )
keyword[if] identifier[table] keyword[and] identifier[table] . identifier[exists] ():
identifier[fields] = identifier[args] [ literal[string] ]. identifier[split] ( literal[string] ) keyword[if] identifier[args] [ literal[string] ] keyword[else] keyword[None]
identifier[html] = identifier[_table_viewer] ( identifier[table] , identifier[rows_per_page] = identifier[args] [ literal[string] ], identifier[fields] = identifier[fields] )
keyword[return] identifier[IPython] . identifier[core] . identifier[display] . identifier[HTML] ( identifier[html] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[name] )
|
def _table_line(args):
"""Implements the BigQuery table magic used to display tables.
The supported syntax is:
%bigquery table -t|--table <name> <other args>
Args:
args: the arguments following '%bigquery table'.
Returns:
The HTML rendering for the table.
"""
# TODO(gram): It would be good to turn _table_viewer into a class that has a registered
# renderer. That would allow this to return a table viewer object which is easier to test.
name = args['table']
table = _get_table(name)
if table and table.exists():
fields = args['cols'].split(',') if args['cols'] else None
html = _table_viewer(table, rows_per_page=args['rows'], fields=fields)
return IPython.core.display.HTML(html) # depends on [control=['if'], data=[]]
else:
raise Exception('Table %s does not exist; cannot display' % name)
|
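To exercise _table_line's decision logic outside a notebook, the stubs below stand in for _get_table and _table_viewer (both hypothetical here); only the cols/rows handling is the point:

class FakeTable(object):
    def exists(self):
        return True

def _get_table(name):  # hypothetical stub
    return FakeTable()

def _table_viewer(table, rows_per_page=25, fields=None):  # hypothetical stub
    return '<p>%s rows/page, fields=%s</p>' % (rows_per_page, fields)

args = {'table': 'mydataset.mytable', 'cols': 'id,name', 'rows': 10}
table = _get_table(args['table'])
if table and table.exists():
    fields = args['cols'].split(',') if args['cols'] else None
    print(_table_viewer(table, rows_per_page=args['rows'], fields=fields))
else:
    raise Exception('Table %s does not exist; cannot display' % args['table'])
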
def refreshCatalogs(self):
"""
    It reindexes the modified catalogs but, while cleanAndRebuildCatalogs
    recatalogs all objects in the database, this method only reindexes
    the already cataloged objects.
    If a metacolumn is added it refreshes the catalog; if only a new index
    is added, it reindexes only those new indexes.
"""
to_refresh = self.refreshcatalog[:]
to_reindex = self.reindexcatalog.keys()
to_reindex = to_reindex[:]
done = []
# Start reindexing the catalogs with new columns
for catalog_to_refresh in to_refresh:
logger.info(
'Catalog {0} refreshing started'.format(catalog_to_refresh))
catalog = getToolByName(self.portal, catalog_to_refresh)
handler = ZLogHandler(self.pgthreshold)
catalog.refreshCatalog(pghandler=handler)
logger.info('Catalog {0} refreshed'.format(catalog_to_refresh))
transaction.commit()
done.append(catalog_to_refresh)
    # Now the catalogs which only need reindexing
for catalog_to_reindex in to_reindex:
if catalog_to_reindex in done:
continue
logger.info(
'Catalog {0} reindexing started'.format(catalog_to_reindex))
catalog = getToolByName(
self.portal, catalog_to_reindex)
indexes = self.reindexcatalog[catalog_to_reindex]
handler = ZLogHandler(self.pgthreshold)
catalog.reindexIndex(indexes, None, pghandler=handler)
logger.info('Catalog {0} reindexed'.format(catalog_to_reindex))
transaction.commit()
done.append(catalog_to_reindex)
|
def function[refreshCatalogs, parameter[self]]:
constant[
    It reindexes the modified catalogs but, while cleanAndRebuildCatalogs
    recatalogs all objects in the database, this method only reindexes
    the already cataloged objects.
    If a metacolumn is added it refreshes the catalog; if only a new index
    is added, it reindexes only those new indexes.
]
variable[to_refresh] assign[=] call[name[self].refreshcatalog][<ast.Slice object at 0x7da18bcc9fc0>]
variable[to_reindex] assign[=] call[name[self].reindexcatalog.keys, parameter[]]
variable[to_reindex] assign[=] call[name[to_reindex]][<ast.Slice object at 0x7da18bcc8ca0>]
variable[done] assign[=] list[[]]
for taget[name[catalog_to_refresh]] in starred[name[to_refresh]] begin[:]
call[name[logger].info, parameter[call[constant[Catalog {0} refreshing started].format, parameter[name[catalog_to_refresh]]]]]
variable[catalog] assign[=] call[name[getToolByName], parameter[name[self].portal, name[catalog_to_refresh]]]
variable[handler] assign[=] call[name[ZLogHandler], parameter[name[self].pgthreshold]]
call[name[catalog].refreshCatalog, parameter[]]
call[name[logger].info, parameter[call[constant[Catalog {0} refreshed].format, parameter[name[catalog_to_refresh]]]]]
call[name[transaction].commit, parameter[]]
call[name[done].append, parameter[name[catalog_to_refresh]]]
for taget[name[catalog_to_reindex]] in starred[name[to_reindex]] begin[:]
if compare[name[catalog_to_reindex] in name[done]] begin[:]
continue
call[name[logger].info, parameter[call[constant[Catalog {0} reindexing started].format, parameter[name[catalog_to_reindex]]]]]
variable[catalog] assign[=] call[name[getToolByName], parameter[name[self].portal, name[catalog_to_reindex]]]
variable[indexes] assign[=] call[name[self].reindexcatalog][name[catalog_to_reindex]]
variable[handler] assign[=] call[name[ZLogHandler], parameter[name[self].pgthreshold]]
call[name[catalog].reindexIndex, parameter[name[indexes], constant[None]]]
call[name[logger].info, parameter[call[constant[Catalog {0} reindexed].format, parameter[name[catalog_to_reindex]]]]]
call[name[transaction].commit, parameter[]]
call[name[done].append, parameter[name[catalog_to_reindex]]]
|
keyword[def] identifier[refreshCatalogs] ( identifier[self] ):
literal[string]
identifier[to_refresh] = identifier[self] . identifier[refreshcatalog] [:]
identifier[to_reindex] = identifier[self] . identifier[reindexcatalog] . identifier[keys] ()
identifier[to_reindex] = identifier[to_reindex] [:]
identifier[done] =[]
keyword[for] identifier[catalog_to_refresh] keyword[in] identifier[to_refresh] :
identifier[logger] . identifier[info] (
literal[string] . identifier[format] ( identifier[catalog_to_refresh] ))
identifier[catalog] = identifier[getToolByName] ( identifier[self] . identifier[portal] , identifier[catalog_to_refresh] )
identifier[handler] = identifier[ZLogHandler] ( identifier[self] . identifier[pgthreshold] )
identifier[catalog] . identifier[refreshCatalog] ( identifier[pghandler] = identifier[handler] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[catalog_to_refresh] ))
identifier[transaction] . identifier[commit] ()
identifier[done] . identifier[append] ( identifier[catalog_to_refresh] )
keyword[for] identifier[catalog_to_reindex] keyword[in] identifier[to_reindex] :
keyword[if] identifier[catalog_to_reindex] keyword[in] identifier[done] :
keyword[continue]
identifier[logger] . identifier[info] (
literal[string] . identifier[format] ( identifier[catalog_to_reindex] ))
identifier[catalog] = identifier[getToolByName] (
identifier[self] . identifier[portal] , identifier[catalog_to_reindex] )
identifier[indexes] = identifier[self] . identifier[reindexcatalog] [ identifier[catalog_to_reindex] ]
identifier[handler] = identifier[ZLogHandler] ( identifier[self] . identifier[pgthreshold] )
identifier[catalog] . identifier[reindexIndex] ( identifier[indexes] , keyword[None] , identifier[pghandler] = identifier[handler] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[catalog_to_reindex] ))
identifier[transaction] . identifier[commit] ()
identifier[done] . identifier[append] ( identifier[catalog_to_reindex] )
|
def refreshCatalogs(self):
"""
    It reindexes the modified catalogs but, while cleanAndRebuildCatalogs
    recatalogs all objects in the database, this method only reindexes
    the already cataloged objects.
    If a metacolumn is added it refreshes the catalog; if only a new index
    is added, it reindexes only those new indexes.
"""
to_refresh = self.refreshcatalog[:]
to_reindex = self.reindexcatalog.keys()
to_reindex = to_reindex[:]
done = []
# Start reindexing the catalogs with new columns
for catalog_to_refresh in to_refresh:
logger.info('Catalog {0} refreshing started'.format(catalog_to_refresh))
catalog = getToolByName(self.portal, catalog_to_refresh)
handler = ZLogHandler(self.pgthreshold)
catalog.refreshCatalog(pghandler=handler)
logger.info('Catalog {0} refreshed'.format(catalog_to_refresh))
transaction.commit()
done.append(catalog_to_refresh) # depends on [control=['for'], data=['catalog_to_refresh']]
    # Now the catalogs which only need reindexing
for catalog_to_reindex in to_reindex:
if catalog_to_reindex in done:
continue # depends on [control=['if'], data=[]]
logger.info('Catalog {0} reindexing started'.format(catalog_to_reindex))
catalog = getToolByName(self.portal, catalog_to_reindex)
indexes = self.reindexcatalog[catalog_to_reindex]
handler = ZLogHandler(self.pgthreshold)
catalog.reindexIndex(indexes, None, pghandler=handler)
logger.info('Catalog {0} reindexed'.format(catalog_to_reindex))
transaction.commit()
done.append(catalog_to_reindex) # depends on [control=['for'], data=['catalog_to_reindex']]
|
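Stripped of the Plone machinery, refreshCatalogs is two passes over bookkeeping structures: fully refresh the catalogs that gained columns, then reindex only the named indexes of the remaining ones, skipping whatever the first pass already covered. A pure-Python sketch with made-up catalog names:

refreshcatalog = ['portal_catalog']
reindexcatalog = {'portal_catalog': ['getId'], 'bika_catalog': ['Title', 'created']}

done = []
for name in refreshcatalog[:]:
    print('refresh %s (columns changed: rebuild metadata + indexes)' % name)
    done.append(name)
for name in list(reindexcatalog.keys()):
    if name in done:
        continue  # the full refresh above already covered it
    print('reindex %s: only %s' % (name, reindexcatalog[name]))
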
def load_fasttext_format(cls, path, ctx=cpu(), **kwargs):
"""Create an instance of the class and load weights.
Load the weights from the fastText binary format created by
https://github.com/facebookresearch/fastText
Parameters
----------
path : str
Path to the .bin model file.
ctx : mx.Context, default mx.cpu()
Context to initialize the weights on.
kwargs : dict
Keyword arguments are passed to the class initializer.
"""
with open(path, 'rb') as f:
new_format, dim, bucket, minn, maxn, = cls._read_model_params(f)
idx_to_token = cls._read_vocab(f, new_format)
dim, matrix = cls._read_vectors(f, new_format, bucket,
len(idx_to_token))
token_to_idx = {token: idx for idx, token in enumerate(idx_to_token)}
if len(token_to_idx) != len(idx_to_token):
        # If multiple tokens with invalid encoding were collapsed into a
        # single token due to replacement of invalid bytes with the Unicode
        # replacement character.
warnings.warn(
'There are duplicate tokens in the embedding file. '
'This is likely due to decoding errors for some tokens, '
'where invalid bytes were replaced by '
'the Unicode replacement character. '
'This affects {} tokens.'.format(
len(idx_to_token) - len(token_to_idx)))
for _ in range(len(token_to_idx), len(idx_to_token)):
# Add pseudo tokens to make sure length is the same
token_to_idx[object()] = -1
assert len(token_to_idx) == len(idx_to_token)
subword_function = create_subword_function(
'NGramHashes', num_subwords=matrix.shape[0] - len(idx_to_token),
ngrams=list(range(minn, maxn + 1)), special_tokens={'</s>'})
self = cls(token_to_idx, subword_function, output_dim=dim, **kwargs)
self.initialize(ctx=ctx)
self.weight.set_data(nd.array(matrix))
return self
|
def function[load_fasttext_format, parameter[cls, path, ctx]]:
constant[Create an instance of the class and load weights.
Load the weights from the fastText binary format created by
https://github.com/facebookresearch/fastText
Parameters
----------
path : str
Path to the .bin model file.
ctx : mx.Context, default mx.cpu()
Context to initialize the weights on.
kwargs : dict
Keyword arguments are passed to the class initializer.
]
with call[name[open], parameter[name[path], constant[rb]]] begin[:]
<ast.Tuple object at 0x7da2041d94b0> assign[=] call[name[cls]._read_model_params, parameter[name[f]]]
variable[idx_to_token] assign[=] call[name[cls]._read_vocab, parameter[name[f], name[new_format]]]
<ast.Tuple object at 0x7da2041da950> assign[=] call[name[cls]._read_vectors, parameter[name[f], name[new_format], name[bucket], call[name[len], parameter[name[idx_to_token]]]]]
variable[token_to_idx] assign[=] <ast.DictComp object at 0x7da2041daa40>
if compare[call[name[len], parameter[name[token_to_idx]]] not_equal[!=] call[name[len], parameter[name[idx_to_token]]]] begin[:]
call[name[warnings].warn, parameter[call[constant[There are duplicate tokens in the embedding file. This is likely due to decoding errors for some tokens, where invalid bytes were replaced by the Unicode replacement character. This affects {} tokens.].format, parameter[binary_operation[call[name[len], parameter[name[idx_to_token]]] - call[name[len], parameter[name[token_to_idx]]]]]]]]
for taget[name[_]] in starred[call[name[range], parameter[call[name[len], parameter[name[token_to_idx]]], call[name[len], parameter[name[idx_to_token]]]]]] begin[:]
call[name[token_to_idx]][call[name[object], parameter[]]] assign[=] <ast.UnaryOp object at 0x7da2041d82e0>
assert[compare[call[name[len], parameter[name[token_to_idx]]] equal[==] call[name[len], parameter[name[idx_to_token]]]]]
variable[subword_function] assign[=] call[name[create_subword_function], parameter[constant[NGramHashes]]]
variable[self] assign[=] call[name[cls], parameter[name[token_to_idx], name[subword_function]]]
call[name[self].initialize, parameter[]]
call[name[self].weight.set_data, parameter[call[name[nd].array, parameter[name[matrix]]]]]
return[name[self]]
|
keyword[def] identifier[load_fasttext_format] ( identifier[cls] , identifier[path] , identifier[ctx] = identifier[cpu] (),** identifier[kwargs] ):
literal[string]
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
identifier[new_format] , identifier[dim] , identifier[bucket] , identifier[minn] , identifier[maxn] ,= identifier[cls] . identifier[_read_model_params] ( identifier[f] )
identifier[idx_to_token] = identifier[cls] . identifier[_read_vocab] ( identifier[f] , identifier[new_format] )
identifier[dim] , identifier[matrix] = identifier[cls] . identifier[_read_vectors] ( identifier[f] , identifier[new_format] , identifier[bucket] ,
identifier[len] ( identifier[idx_to_token] ))
identifier[token_to_idx] ={ identifier[token] : identifier[idx] keyword[for] identifier[idx] , identifier[token] keyword[in] identifier[enumerate] ( identifier[idx_to_token] )}
keyword[if] identifier[len] ( identifier[token_to_idx] )!= identifier[len] ( identifier[idx_to_token] ):
identifier[warnings] . identifier[warn] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[len] ( identifier[idx_to_token] )- identifier[len] ( identifier[token_to_idx] )))
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[token_to_idx] ), identifier[len] ( identifier[idx_to_token] )):
identifier[token_to_idx] [ identifier[object] ()]=- literal[int]
keyword[assert] identifier[len] ( identifier[token_to_idx] )== identifier[len] ( identifier[idx_to_token] )
identifier[subword_function] = identifier[create_subword_function] (
literal[string] , identifier[num_subwords] = identifier[matrix] . identifier[shape] [ literal[int] ]- identifier[len] ( identifier[idx_to_token] ),
identifier[ngrams] = identifier[list] ( identifier[range] ( identifier[minn] , identifier[maxn] + literal[int] )), identifier[special_tokens] ={ literal[string] })
identifier[self] = identifier[cls] ( identifier[token_to_idx] , identifier[subword_function] , identifier[output_dim] = identifier[dim] ,** identifier[kwargs] )
identifier[self] . identifier[initialize] ( identifier[ctx] = identifier[ctx] )
identifier[self] . identifier[weight] . identifier[set_data] ( identifier[nd] . identifier[array] ( identifier[matrix] ))
keyword[return] identifier[self]
|
def load_fasttext_format(cls, path, ctx=cpu(), **kwargs):
"""Create an instance of the class and load weights.
Load the weights from the fastText binary format created by
https://github.com/facebookresearch/fastText
Parameters
----------
path : str
Path to the .bin model file.
ctx : mx.Context, default mx.cpu()
Context to initialize the weights on.
kwargs : dict
Keyword arguments are passed to the class initializer.
"""
with open(path, 'rb') as f:
(new_format, dim, bucket, minn, maxn) = cls._read_model_params(f)
idx_to_token = cls._read_vocab(f, new_format)
(dim, matrix) = cls._read_vectors(f, new_format, bucket, len(idx_to_token)) # depends on [control=['with'], data=['f']]
token_to_idx = {token: idx for (idx, token) in enumerate(idx_to_token)}
if len(token_to_idx) != len(idx_to_token):
        # If multiple tokens with invalid encoding were collapsed into a
        # single token due to replacement of invalid bytes with the Unicode
        # replacement character.
warnings.warn('There are duplicate tokens in the embedding file. This is likely due to decoding errors for some tokens, where invalid bytes were replaced by the Unicode replacement character. This affects {} tokens.'.format(len(idx_to_token) - len(token_to_idx)))
for _ in range(len(token_to_idx), len(idx_to_token)):
# Add pseudo tokens to make sure length is the same
token_to_idx[object()] = -1 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
assert len(token_to_idx) == len(idx_to_token)
subword_function = create_subword_function('NGramHashes', num_subwords=matrix.shape[0] - len(idx_to_token), ngrams=list(range(minn, maxn + 1)), special_tokens={'</s>'})
self = cls(token_to_idx, subword_function, output_dim=dim, **kwargs)
self.initialize(ctx=ctx)
self.weight.set_data(nd.array(matrix))
return self
|
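The subtlest step in load_fasttext_format is the duplicate-token fixup: decoding errors can collapse distinct tokens into one string, so unique object() keys pad token_to_idx back to the vocabulary length. That step runs on its own:

idx_to_token = ['the', u'\ufffd', u'\ufffd', 'cat']  # two tokens collapsed by decoding
token_to_idx = {token: idx for idx, token in enumerate(idx_to_token)}
print(len(token_to_idx), len(idx_to_token))  # 3 4 -> lengths disagree
for _ in range(len(token_to_idx), len(idx_to_token)):
    token_to_idx[object()] = -1  # each object() is a distinct, never-looked-up key
assert len(token_to_idx) == len(idx_to_token)
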
def _get_param(self, param, allowed_values=None, optional=False):
"""Get parameter in GET request."""
request_params = self._request_params()
if param in request_params:
value = request_params[param].lower()
if allowed_values is not None:
if value in allowed_values:
self.params[param] = value
else:
raise OWSInvalidParameterValue("%s %s is not supported" % (param, value), value=param)
elif optional:
self.params[param] = None
else:
raise OWSMissingParameterValue('Parameter "%s" is missing' % param, value=param)
return self.params[param]
|
def function[_get_param, parameter[self, param, allowed_values, optional]]:
constant[Get parameter in GET request.]
variable[request_params] assign[=] call[name[self]._request_params, parameter[]]
if compare[name[param] in name[request_params]] begin[:]
variable[value] assign[=] call[call[name[request_params]][name[param]].lower, parameter[]]
if compare[name[allowed_values] is_not constant[None]] begin[:]
if compare[name[value] in name[allowed_values]] begin[:]
call[name[self].params][name[param]] assign[=] name[value]
return[call[name[self].params][name[param]]]
|
keyword[def] identifier[_get_param] ( identifier[self] , identifier[param] , identifier[allowed_values] = keyword[None] , identifier[optional] = keyword[False] ):
literal[string]
identifier[request_params] = identifier[self] . identifier[_request_params] ()
keyword[if] identifier[param] keyword[in] identifier[request_params] :
identifier[value] = identifier[request_params] [ identifier[param] ]. identifier[lower] ()
keyword[if] identifier[allowed_values] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[value] keyword[in] identifier[allowed_values] :
identifier[self] . identifier[params] [ identifier[param] ]= identifier[value]
keyword[else] :
keyword[raise] identifier[OWSInvalidParameterValue] ( literal[string] %( identifier[param] , identifier[value] ), identifier[value] = identifier[param] )
keyword[elif] identifier[optional] :
identifier[self] . identifier[params] [ identifier[param] ]= keyword[None]
keyword[else] :
keyword[raise] identifier[OWSMissingParameterValue] ( literal[string] % identifier[param] , identifier[value] = identifier[param] )
keyword[return] identifier[self] . identifier[params] [ identifier[param] ]
|
def _get_param(self, param, allowed_values=None, optional=False):
"""Get parameter in GET request."""
request_params = self._request_params()
if param in request_params:
value = request_params[param].lower()
if allowed_values is not None:
if value in allowed_values:
self.params[param] = value # depends on [control=['if'], data=['value']]
else:
raise OWSInvalidParameterValue('%s %s is not supported' % (param, value), value=param) # depends on [control=['if'], data=['allowed_values']] # depends on [control=['if'], data=['param', 'request_params']]
elif optional:
self.params[param] = None # depends on [control=['if'], data=[]]
else:
raise OWSMissingParameterValue('Parameter "%s" is missing' % param, value=param)
return self.params[param]
|
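Note that _get_param only assigns self.params[param] when allowed_values is given, so a present parameter with no whitelist falls through to the final lookup and can raise KeyError. A self-contained variant of the same lookup, with the OWS exceptions swapped for built-ins and the value stored unconditionally:

def get_param(request_params, param, allowed_values=None, optional=False):
    params = {}
    if param in request_params:
        value = request_params[param].lower()
        if allowed_values is not None and value not in allowed_values:
            raise ValueError('%s %s is not supported' % (param, value))
        params[param] = value  # stored even without a whitelist
    elif optional:
        params[param] = None
    else:
        raise KeyError('Parameter "%s" is missing' % param)
    return params[param]

print(get_param({'request': 'GetCapabilities'}, 'request',
                allowed_values={'getcapabilities', 'getmap'}))  # getcapabilities
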
def validate_collections(self, model, context=None):
"""
Validate collection properties
Performs validation on collection properties to return a result object.
:param model: object or dict
:param context: object, dict or None
:return: shiftschema.result.Result
"""
result = Result()
for property_name in self.collections:
prop = self.collections[property_name]
collection = self.get(model, property_name)
errors = prop.validate(
value=collection,
model=model,
context=context
)
if len(errors):
result.add_collection_errors(
property_name=property_name,
direct_errors=errors
)
collection_errors = prop.validate_with_schema(
collection=collection,
context=context
)
result.add_collection_errors(
property_name=property_name,
collection_errors=collection_errors
)
return result
|
def function[validate_collections, parameter[self, model, context]]:
constant[
Validate collection properties
Performs validation on collection properties to return a result object.
:param model: object or dict
:param context: object, dict or None
:return: shiftschema.result.Result
]
variable[result] assign[=] call[name[Result], parameter[]]
for taget[name[property_name]] in starred[name[self].collections] begin[:]
variable[prop] assign[=] call[name[self].collections][name[property_name]]
variable[collection] assign[=] call[name[self].get, parameter[name[model], name[property_name]]]
variable[errors] assign[=] call[name[prop].validate, parameter[]]
if call[name[len], parameter[name[errors]]] begin[:]
call[name[result].add_collection_errors, parameter[]]
variable[collection_errors] assign[=] call[name[prop].validate_with_schema, parameter[]]
call[name[result].add_collection_errors, parameter[]]
return[name[result]]
|
keyword[def] identifier[validate_collections] ( identifier[self] , identifier[model] , identifier[context] = keyword[None] ):
literal[string]
identifier[result] = identifier[Result] ()
keyword[for] identifier[property_name] keyword[in] identifier[self] . identifier[collections] :
identifier[prop] = identifier[self] . identifier[collections] [ identifier[property_name] ]
identifier[collection] = identifier[self] . identifier[get] ( identifier[model] , identifier[property_name] )
identifier[errors] = identifier[prop] . identifier[validate] (
identifier[value] = identifier[collection] ,
identifier[model] = identifier[model] ,
identifier[context] = identifier[context]
)
keyword[if] identifier[len] ( identifier[errors] ):
identifier[result] . identifier[add_collection_errors] (
identifier[property_name] = identifier[property_name] ,
identifier[direct_errors] = identifier[errors]
)
identifier[collection_errors] = identifier[prop] . identifier[validate_with_schema] (
identifier[collection] = identifier[collection] ,
identifier[context] = identifier[context]
)
identifier[result] . identifier[add_collection_errors] (
identifier[property_name] = identifier[property_name] ,
identifier[collection_errors] = identifier[collection_errors]
)
keyword[return] identifier[result]
|
def validate_collections(self, model, context=None):
"""
Validate collection properties
Performs validation on collection properties to return a result object.
:param model: object or dict
:param context: object, dict or None
:return: shiftschema.result.Result
"""
result = Result()
for property_name in self.collections:
prop = self.collections[property_name]
collection = self.get(model, property_name)
errors = prop.validate(value=collection, model=model, context=context)
if len(errors):
result.add_collection_errors(property_name=property_name, direct_errors=errors) # depends on [control=['if'], data=[]]
collection_errors = prop.validate_with_schema(collection=collection, context=context)
result.add_collection_errors(property_name=property_name, collection_errors=collection_errors) # depends on [control=['for'], data=['property_name']]
return result
|
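validate_collections aggregates two kinds of errors per property: direct errors from the property validator and per-item errors from the collection's schema. The same shape reduced to plain callables (all names invented for the sketch):

def validate_collections(model, validators):
    result = {}
    for prop, (validate, validate_items) in validators.items():
        collection = model.get(prop, [])
        direct = validate(collection)
        if direct:
            result.setdefault(prop, {})['direct_errors'] = direct
        item_errors = validate_items(collection)
        if item_errors:
            result.setdefault(prop, {})['collection_errors'] = item_errors
    return result

validators = {
    'tags': (lambda c: [] if c else ['may not be empty'],
             lambda c: {i: 'blank' for i, t in enumerate(c) if not t}),
}
print(validate_collections({'tags': ['ok', '']}, validators))
# {'tags': {'collection_errors': {1: 'blank'}}}
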
def DeleteOldRuns(self, job, cutoff_timestamp=None, token=None):
"""Deletes flows initiated by the job that are older than specified."""
if cutoff_timestamp is None:
raise ValueError("cutoff_timestamp can't be None")
child_flows = list(job.ListChildren(age=cutoff_timestamp))
with queue_manager.QueueManager(token=token) as queuemanager:
queuemanager.MultiDestroyFlowStates(child_flows)
aff4.FACTORY.MultiDelete(child_flows, token=token)
return len(child_flows)
|
def function[DeleteOldRuns, parameter[self, job, cutoff_timestamp, token]]:
constant[Deletes flows initiated by the job that are older than specified.]
if compare[name[cutoff_timestamp] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1c3d480>
variable[child_flows] assign[=] call[name[list], parameter[call[name[job].ListChildren, parameter[]]]]
with call[name[queue_manager].QueueManager, parameter[]] begin[:]
call[name[queuemanager].MultiDestroyFlowStates, parameter[name[child_flows]]]
call[name[aff4].FACTORY.MultiDelete, parameter[name[child_flows]]]
return[call[name[len], parameter[name[child_flows]]]]
|
keyword[def] identifier[DeleteOldRuns] ( identifier[self] , identifier[job] , identifier[cutoff_timestamp] = keyword[None] , identifier[token] = keyword[None] ):
literal[string]
keyword[if] identifier[cutoff_timestamp] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[child_flows] = identifier[list] ( identifier[job] . identifier[ListChildren] ( identifier[age] = identifier[cutoff_timestamp] ))
keyword[with] identifier[queue_manager] . identifier[QueueManager] ( identifier[token] = identifier[token] ) keyword[as] identifier[queuemanager] :
identifier[queuemanager] . identifier[MultiDestroyFlowStates] ( identifier[child_flows] )
identifier[aff4] . identifier[FACTORY] . identifier[MultiDelete] ( identifier[child_flows] , identifier[token] = identifier[token] )
keyword[return] identifier[len] ( identifier[child_flows] )
|
def DeleteOldRuns(self, job, cutoff_timestamp=None, token=None):
"""Deletes flows initiated by the job that are older than specified."""
if cutoff_timestamp is None:
raise ValueError("cutoff_timestamp can't be None") # depends on [control=['if'], data=[]]
child_flows = list(job.ListChildren(age=cutoff_timestamp))
with queue_manager.QueueManager(token=token) as queuemanager:
queuemanager.MultiDestroyFlowStates(child_flows) # depends on [control=['with'], data=['queuemanager']]
aff4.FACTORY.MultiDelete(child_flows, token=token)
return len(child_flows)
|
def commit(self):
"""Commit mutations to the database.
:rtype: datetime
:returns: timestamp of the committed changes.
:raises ValueError: if there are no mutations to commit.
"""
self._check_state()
database = self._session._database
api = database.spanner_api
metadata = _metadata_with_prefix(database.name)
response = api.commit(
self._session.name,
self._mutations,
transaction_id=self._transaction_id,
metadata=metadata,
)
self.committed = _pb_timestamp_to_datetime(response.commit_timestamp)
del self._session._transaction
return self.committed
|
def function[commit, parameter[self]]:
constant[Commit mutations to the database.
:rtype: datetime
:returns: timestamp of the committed changes.
:raises ValueError: if there are no mutations to commit.
]
call[name[self]._check_state, parameter[]]
variable[database] assign[=] name[self]._session._database
variable[api] assign[=] name[database].spanner_api
variable[metadata] assign[=] call[name[_metadata_with_prefix], parameter[name[database].name]]
variable[response] assign[=] call[name[api].commit, parameter[name[self]._session.name, name[self]._mutations]]
name[self].committed assign[=] call[name[_pb_timestamp_to_datetime], parameter[name[response].commit_timestamp]]
<ast.Delete object at 0x7da20e954dc0>
return[name[self].committed]
|
keyword[def] identifier[commit] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_check_state] ()
identifier[database] = identifier[self] . identifier[_session] . identifier[_database]
identifier[api] = identifier[database] . identifier[spanner_api]
identifier[metadata] = identifier[_metadata_with_prefix] ( identifier[database] . identifier[name] )
identifier[response] = identifier[api] . identifier[commit] (
identifier[self] . identifier[_session] . identifier[name] ,
identifier[self] . identifier[_mutations] ,
identifier[transaction_id] = identifier[self] . identifier[_transaction_id] ,
identifier[metadata] = identifier[metadata] ,
)
identifier[self] . identifier[committed] = identifier[_pb_timestamp_to_datetime] ( identifier[response] . identifier[commit_timestamp] )
keyword[del] identifier[self] . identifier[_session] . identifier[_transaction]
keyword[return] identifier[self] . identifier[committed]
|
def commit(self):
"""Commit mutations to the database.
:rtype: datetime
:returns: timestamp of the committed changes.
:raises ValueError: if there are no mutations to commit.
"""
self._check_state()
database = self._session._database
api = database.spanner_api
metadata = _metadata_with_prefix(database.name)
response = api.commit(self._session.name, self._mutations, transaction_id=self._transaction_id, metadata=metadata)
self.committed = _pb_timestamp_to_datetime(response.commit_timestamp)
del self._session._transaction
return self.committed
|
def get_descriptor_for_layer(self, layer):
"""
Returns the standard JSON descriptor for the layer. There is a lot of
    useful information in there.
"""
if not layer in self._layer_descriptor_cache:
params = {'f': 'pjson'}
if self.token:
params['token'] = self.token
response = requests.get(self._build_request(layer), params=params)
self._layer_descriptor_cache[layer] = response.json()
return self._layer_descriptor_cache[layer]
|
def function[get_descriptor_for_layer, parameter[self, layer]]:
constant[
Returns the standard JSON descriptor for the layer. There is a lot of
    useful information in there.
]
if <ast.UnaryOp object at 0x7da18eb56260> begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18eb55f90>], [<ast.Constant object at 0x7da18eb54190>]]
if name[self].token begin[:]
call[name[params]][constant[token]] assign[=] name[self].token
variable[response] assign[=] call[name[requests].get, parameter[call[name[self]._build_request, parameter[name[layer]]]]]
call[name[self]._layer_descriptor_cache][name[layer]] assign[=] call[name[response].json, parameter[]]
return[call[name[self]._layer_descriptor_cache][name[layer]]]
|
keyword[def] identifier[get_descriptor_for_layer] ( identifier[self] , identifier[layer] ):
literal[string]
keyword[if] keyword[not] identifier[layer] keyword[in] identifier[self] . identifier[_layer_descriptor_cache] :
identifier[params] ={ literal[string] : literal[string] }
keyword[if] identifier[self] . identifier[token] :
identifier[params] [ literal[string] ]= identifier[self] . identifier[token]
identifier[response] = identifier[requests] . identifier[get] ( identifier[self] . identifier[_build_request] ( identifier[layer] ), identifier[params] = identifier[params] )
identifier[self] . identifier[_layer_descriptor_cache] [ identifier[layer] ]= identifier[response] . identifier[json] ()
keyword[return] identifier[self] . identifier[_layer_descriptor_cache] [ identifier[layer] ]
|
def get_descriptor_for_layer(self, layer):
"""
Returns the standard JSON descriptor for the layer. There is a lot of
    useful information in there.
"""
if not layer in self._layer_descriptor_cache:
params = {'f': 'pjson'}
if self.token:
params['token'] = self.token # depends on [control=['if'], data=[]]
response = requests.get(self._build_request(layer), params=params)
self._layer_descriptor_cache[layer] = response.json() # depends on [control=['if'], data=[]]
return self._layer_descriptor_cache[layer]
|
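get_descriptor_for_layer is cache-on-first-use around an HTTP GET. The sketch below keeps the caching shape but stubs out the requests.get call so it runs offline; the descriptor dict and class name are fabricated:

class ArcGISService(object):  # hypothetical name for the sketch
    def __init__(self, token=None):
        self.token = token
        self._layer_descriptor_cache = {}

    def get_descriptor_for_layer(self, layer):
        if layer not in self._layer_descriptor_cache:
            params = {'f': 'pjson'}
            if self.token:
                params['token'] = self.token
            # requests.get(self._build_request(layer), params=params) goes here
            self._layer_descriptor_cache[layer] = {'id': layer, 'params': params}
        return self._layer_descriptor_cache[layer]

svc = ArcGISService(token='abc123')
assert svc.get_descriptor_for_layer(0) is svc.get_descriptor_for_layer(0)  # one "fetch"
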
def gid(self):
"""Return the group id that the daemon will run with
:rtype: int
"""
if not self._gid:
if self.controller.config.daemon.group:
self._gid = grp.getgrnam(self.config.daemon.group).gr_gid
else:
self._gid = os.getgid()
return self._gid
|
def function[gid, parameter[self]]:
constant[Return the group id that the daemon will run with
:rtype: int
]
if <ast.UnaryOp object at 0x7da18fe938b0> begin[:]
if name[self].controller.config.daemon.group begin[:]
name[self]._gid assign[=] call[name[grp].getgrnam, parameter[name[self].config.daemon.group]].gr_gid
return[name[self]._gid]
|
keyword[def] identifier[gid] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_gid] :
keyword[if] identifier[self] . identifier[controller] . identifier[config] . identifier[daemon] . identifier[group] :
identifier[self] . identifier[_gid] = identifier[grp] . identifier[getgrnam] ( identifier[self] . identifier[config] . identifier[daemon] . identifier[group] ). identifier[gr_gid]
keyword[else] :
identifier[self] . identifier[_gid] = identifier[os] . identifier[getgid] ()
keyword[return] identifier[self] . identifier[_gid]
|
def gid(self):
"""Return the group id that the daemon will run with
:rtype: int
"""
if not self._gid:
if self.controller.config.daemon.group:
self._gid = grp.getgrnam(self.config.daemon.group).gr_gid # depends on [control=['if'], data=[]]
else:
self._gid = os.getgid() # depends on [control=['if'], data=[]]
return self._gid
|
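gid resolves a configured group name through grp and falls back to the process gid. The same two branches, runnable on any Unix; the configured name is an assumption and left unset here:

import grp
import os

configured_group = None  # e.g. 'daemon-group' if the config had one
if configured_group:
    gid = grp.getgrnam(configured_group).gr_gid
else:
    gid = os.getgid()
print(gid)
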
def create(self, alpha_sender):
"""
Create a new AlphaSenderInstance
:param unicode alpha_sender: An Alphanumeric Sender ID string, up to 11 characters.
:returns: Newly created AlphaSenderInstance
:rtype: twilio.rest.messaging.v1.service.alpha_sender.AlphaSenderInstance
"""
data = values.of({'AlphaSender': alpha_sender, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return AlphaSenderInstance(self._version, payload, service_sid=self._solution['service_sid'], )
|
def function[create, parameter[self, alpha_sender]]:
constant[
Create a new AlphaSenderInstance
:param unicode alpha_sender: An Alphanumeric Sender ID string, up to 11 characters.
:returns: Newly created AlphaSenderInstance
:rtype: twilio.rest.messaging.v1.service.alpha_sender.AlphaSenderInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da2054a61a0>], [<ast.Name object at 0x7da2054a46a0>]]]]
variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]]
return[call[name[AlphaSenderInstance], parameter[name[self]._version, name[payload]]]]
|
keyword[def] identifier[create] ( identifier[self] , identifier[alpha_sender] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({ literal[string] : identifier[alpha_sender] ,})
identifier[payload] = identifier[self] . identifier[_version] . identifier[create] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[AlphaSenderInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
|
def create(self, alpha_sender):
"""
Create a new AlphaSenderInstance
:param unicode alpha_sender: An Alphanumeric Sender ID string, up to 11 characters.
:returns: Newly created AlphaSenderInstance
:rtype: twilio.rest.messaging.v1.service.alpha_sender.AlphaSenderInstance
"""
data = values.of({'AlphaSender': alpha_sender})
payload = self._version.create('POST', self._uri, data=data)
return AlphaSenderInstance(self._version, payload, service_sid=self._solution['service_sid'])
|
def _filter_matrix_rows(cls, matrix):
'''matrix = output from _to_matrix'''
indexes_to_keep = []
for i in range(len(matrix)):
keep_row = False
for element in matrix[i]:
if element not in {'NA', 'no'}:
keep_row = True
break
if keep_row:
indexes_to_keep.append(i)
return [matrix[i] for i in indexes_to_keep]
|
def function[_filter_matrix_rows, parameter[cls, matrix]]:
constant[matrix = output from _to_matrix]
variable[indexes_to_keep] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[matrix]]]]]] begin[:]
variable[keep_row] assign[=] constant[False]
for taget[name[element]] in starred[call[name[matrix]][name[i]]] begin[:]
if compare[name[element] <ast.NotIn object at 0x7da2590d7190> <ast.Set object at 0x7da18f00e110>] begin[:]
variable[keep_row] assign[=] constant[True]
break
if name[keep_row] begin[:]
call[name[indexes_to_keep].append, parameter[name[i]]]
return[<ast.ListComp object at 0x7da18fe910c0>]
|
keyword[def] identifier[_filter_matrix_rows] ( identifier[cls] , identifier[matrix] ):
literal[string]
identifier[indexes_to_keep] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[matrix] )):
identifier[keep_row] = keyword[False]
keyword[for] identifier[element] keyword[in] identifier[matrix] [ identifier[i] ]:
keyword[if] identifier[element] keyword[not] keyword[in] { literal[string] , literal[string] }:
identifier[keep_row] = keyword[True]
keyword[break]
keyword[if] identifier[keep_row] :
identifier[indexes_to_keep] . identifier[append] ( identifier[i] )
keyword[return] [ identifier[matrix] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[indexes_to_keep] ]
|
def _filter_matrix_rows(cls, matrix):
"""matrix = output from _to_matrix"""
indexes_to_keep = []
for i in range(len(matrix)):
keep_row = False
for element in matrix[i]:
if element not in {'NA', 'no'}:
keep_row = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element']]
if keep_row:
indexes_to_keep.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return [matrix[i] for i in indexes_to_keep]
|
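A worked example for _filter_matrix_rows: a row survives if any element is something other than 'NA' or 'no'. The same filter as a one-off comprehension:

matrix = [['NA', 'no'], ['yes', 'NA'], ['no', 'no'], ['NA', '10']]
kept = [row for row in matrix if any(e not in {'NA', 'no'} for e in row)]
print(kept)  # [['yes', 'NA'], ['NA', '10']]
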
def add_object_to_scope(self, obj):
"""Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided.
"""
if isinstance(obj, Computer):
self.add_object_to_path(obj, "scope/computers")
elif isinstance(obj, ComputerGroup):
self.add_object_to_path(obj, "scope/computer_groups")
elif isinstance(obj, Building):
self.add_object_to_path(obj, "scope/buildings")
elif isinstance(obj, Department):
self.add_object_to_path(obj, "scope/departments")
else:
raise TypeError
|
def function[add_object_to_scope, parameter[self, obj]]:
constant[Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided.
]
if call[name[isinstance], parameter[name[obj], name[Computer]]] begin[:]
call[name[self].add_object_to_path, parameter[name[obj], constant[scope/computers]]]
|
keyword[def] identifier[add_object_to_scope] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Computer] ):
identifier[self] . identifier[add_object_to_path] ( identifier[obj] , literal[string] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[ComputerGroup] ):
identifier[self] . identifier[add_object_to_path] ( identifier[obj] , literal[string] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Building] ):
identifier[self] . identifier[add_object_to_path] ( identifier[obj] , literal[string] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Department] ):
identifier[self] . identifier[add_object_to_path] ( identifier[obj] , literal[string] )
keyword[else] :
keyword[raise] identifier[TypeError]
|
def add_object_to_scope(self, obj):
"""Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided.
"""
if isinstance(obj, Computer):
self.add_object_to_path(obj, 'scope/computers') # depends on [control=['if'], data=[]]
elif isinstance(obj, ComputerGroup):
self.add_object_to_path(obj, 'scope/computer_groups') # depends on [control=['if'], data=[]]
elif isinstance(obj, Building):
self.add_object_to_path(obj, 'scope/buildings') # depends on [control=['if'], data=[]]
elif isinstance(obj, Department):
self.add_object_to_path(obj, 'scope/departments') # depends on [control=['if'], data=[]]
else:
raise TypeError
|
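The isinstance chain in add_object_to_scope is a fixed type-to-path dispatch, which a mapping can express in one place. Stand-in classes keep the sketch self-contained; this is a rewrite of the idea, not the python-jss API:

class Computer(object): pass
class ComputerGroup(object): pass
class Building(object): pass
class Department(object): pass

SCOPE_PATHS = {
    Computer: 'scope/computers',
    ComputerGroup: 'scope/computer_groups',
    Building: 'scope/buildings',
    Department: 'scope/departments',
}

def scope_path(obj):
    for cls, path in SCOPE_PATHS.items():
        if isinstance(obj, cls):
            return path
    raise TypeError(type(obj).__name__)

print(scope_path(Building()))  # scope/buildings
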
def urandom(*args: Any, **kwargs: Any) -> bytes:
"""Return a bytes object containing random bytes.
:return: Bytes.
"""
return os.urandom(*args, **kwargs)
|
def function[urandom, parameter[]]:
constant[Return a bytes object containing random bytes.
:return: Bytes.
]
return[call[name[os].urandom, parameter[<ast.Starred object at 0x7da18f58ec50>]]]
|
keyword[def] identifier[urandom] (* identifier[args] : identifier[Any] ,** identifier[kwargs] : identifier[Any] )-> identifier[bytes] :
literal[string]
keyword[return] identifier[os] . identifier[urandom] (* identifier[args] ,** identifier[kwargs] )
|
def urandom(*args: Any, **kwargs: Any) -> bytes:
"""Return a bytes object containing random bytes.
:return: Bytes.
"""
return os.urandom(*args, **kwargs)
|
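The urandom wrapper forwards straight to os.urandom; typical use is grabbing a fixed-size key or nonce:

import os

nonce = os.urandom(16)  # 16 cryptographically strong random bytes
print(len(nonce), nonce.hex())
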
def are_connected(self, body_a, body_b):
'''Determine whether the given bodies are currently connected.
Parameters
----------
body_a : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
body_b : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
Returns
-------
connected : bool
Return True iff the two bodies are connected.
'''
return bool(ode.areConnected(
self.get_body(body_a).ode_body,
self.get_body(body_b).ode_body))
|
def function[are_connected, parameter[self, body_a, body_b]]:
constant[Determine whether the given bodies are currently connected.
Parameters
----------
body_a : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
body_b : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
Returns
-------
connected : bool
Return True iff the two bodies are connected.
]
return[call[name[bool], parameter[call[name[ode].areConnected, parameter[call[name[self].get_body, parameter[name[body_a]]].ode_body, call[name[self].get_body, parameter[name[body_b]]].ode_body]]]]]
|
keyword[def] identifier[are_connected] ( identifier[self] , identifier[body_a] , identifier[body_b] ):
literal[string]
keyword[return] identifier[bool] ( identifier[ode] . identifier[areConnected] (
identifier[self] . identifier[get_body] ( identifier[body_a] ). identifier[ode_body] ,
identifier[self] . identifier[get_body] ( identifier[body_b] ). identifier[ode_body] ))
|
def are_connected(self, body_a, body_b):
"""Determine whether the given bodies are currently connected.
Parameters
----------
body_a : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
body_b : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
Returns
-------
connected : bool
Return True iff the two bodies are connected.
"""
return bool(ode.areConnected(self.get_body(body_a).ode_body, self.get_body(body_b).ode_body))
|
def MAFFT(sequences, gap_open=1.53, gap_extension=0.0, retree=2):
'''A Coral wrapper for the MAFFT command line multiple sequence aligner.
:param sequences: A list of sequences to align.
:type sequences: List of homogeneous sequences (all DNA, or all RNA,
etc.)
:param gap_open: --op (gap open) penalty in MAFFT cli.
:type gap_open: float
:param gap_extension: --ep (gap extension) penalty in MAFFT cli.
:type gap_extension: float
:param retree: Number of times to build the guide tree.
:type retree: int
'''
arguments = ['mafft']
arguments += ['--op', str(gap_open)]
arguments += ['--ep', str(gap_extension)]
arguments += ['--retree', str(retree)]
arguments.append('input.fasta')
tempdir = tempfile.mkdtemp()
try:
with open(os.path.join(tempdir, 'input.fasta'), 'w') as f:
for i, sequence in enumerate(sequences):
if hasattr(sequence, 'name'):
name = sequence.name
else:
name = 'sequence{}'.format(i)
f.write('>{}\n'.format(name))
f.write(str(sequence) + '\n')
process = subprocess.Popen(arguments, stdout=subprocess.PIPE,
stderr=open(os.devnull, 'w'), cwd=tempdir)
stdout = process.communicate()[0]
finally:
shutil.rmtree(tempdir)
# Process stdout into something downstream process can use
records = stdout.split('>')
# First line is now blank
records.pop(0)
aligned_list = []
for record in records:
lines = record.split('\n')
name = lines.pop(0)
aligned_list.append(coral.DNA(''.join(lines)))
return aligned_list
|
def function[MAFFT, parameter[sequences, gap_open, gap_extension, retree]]:
constant[A Coral wrapper for the MAFFT command line multiple sequence aligner.
:param sequences: A list of sequences to align.
:type sequences: List of homogeneous sequences (all DNA, or all RNA,
etc.)
:param gap_open: --op (gap open) penalty in MAFFT cli.
:type gap_open: float
:param gap_extension: --ep (gap extension) penalty in MAFFT cli.
:type gap_extension: float
:param retree: Number of times to build the guide tree.
:type retree: int
]
variable[arguments] assign[=] list[[<ast.Constant object at 0x7da20c76cf40>]]
<ast.AugAssign object at 0x7da20c76c970>
<ast.AugAssign object at 0x7da20c76d720>
<ast.AugAssign object at 0x7da1b06df670>
call[name[arguments].append, parameter[constant[input.fasta]]]
variable[tempdir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
<ast.Try object at 0x7da1b06deb30>
variable[records] assign[=] call[name[stdout].split, parameter[constant[>]]]
call[name[records].pop, parameter[constant[0]]]
variable[aligned_list] assign[=] list[[]]
for taget[name[record]] in starred[name[records]] begin[:]
variable[lines] assign[=] call[name[record].split, parameter[constant[
]]]
variable[name] assign[=] call[name[lines].pop, parameter[constant[0]]]
call[name[aligned_list].append, parameter[call[name[coral].DNA, parameter[call[constant[].join, parameter[name[lines]]]]]]]
return[name[aligned_list]]
|
keyword[def] identifier[MAFFT] ( identifier[sequences] , identifier[gap_open] = literal[int] , identifier[gap_extension] = literal[int] , identifier[retree] = literal[int] ):
literal[string]
identifier[arguments] =[ literal[string] ]
identifier[arguments] +=[ literal[string] , identifier[str] ( identifier[gap_open] )]
identifier[arguments] +=[ literal[string] , identifier[str] ( identifier[gap_extension] )]
identifier[arguments] +=[ literal[string] , identifier[str] ( identifier[retree] )]
identifier[arguments] . identifier[append] ( literal[string] )
identifier[tempdir] = identifier[tempfile] . identifier[mkdtemp] ()
keyword[try] :
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[tempdir] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[i] , identifier[sequence] keyword[in] identifier[enumerate] ( identifier[sequences] ):
keyword[if] identifier[hasattr] ( identifier[sequence] , literal[string] ):
identifier[name] = identifier[sequence] . identifier[name]
keyword[else] :
identifier[name] = literal[string] . identifier[format] ( identifier[i] )
identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[f] . identifier[write] ( identifier[str] ( identifier[sequence] )+ literal[string] )
identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[arguments] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[open] ( identifier[os] . identifier[devnull] , literal[string] ), identifier[cwd] = identifier[tempdir] )
identifier[stdout] = identifier[process] . identifier[communicate] ()[ literal[int] ]
keyword[finally] :
identifier[shutil] . identifier[rmtree] ( identifier[tempdir] )
identifier[records] = identifier[stdout] . identifier[split] ( literal[string] )
identifier[records] . identifier[pop] ( literal[int] )
identifier[aligned_list] =[]
keyword[for] identifier[record] keyword[in] identifier[records] :
identifier[lines] = identifier[record] . identifier[split] ( literal[string] )
identifier[name] = identifier[lines] . identifier[pop] ( literal[int] )
identifier[aligned_list] . identifier[append] ( identifier[coral] . identifier[DNA] ( literal[string] . identifier[join] ( identifier[lines] )))
keyword[return] identifier[aligned_list]
|
def MAFFT(sequences, gap_open=1.53, gap_extension=0.0, retree=2):
"""A Coral wrapper for the MAFFT command line multiple sequence aligner.
:param sequences: A list of sequences to align.
:type sequences: List of homogeneous sequences (all DNA, or all RNA,
etc.)
:param gap_open: --op (gap open) penalty in MAFFT cli.
:type gap_open: float
:param gap_extension: --ep (gap extension) penalty in MAFFT cli.
:type gap_extension: float
:param retree: Number of times to build the guide tree.
:type retree: int
"""
arguments = ['mafft']
arguments += ['--op', str(gap_open)]
arguments += ['--ep', str(gap_extension)]
arguments += ['--retree', str(retree)]
arguments.append('input.fasta')
tempdir = tempfile.mkdtemp()
try:
with open(os.path.join(tempdir, 'input.fasta'), 'w') as f:
for (i, sequence) in enumerate(sequences):
if hasattr(sequence, 'name'):
name = sequence.name # depends on [control=['if'], data=[]]
else:
name = 'sequence{}'.format(i)
f.write('>{}\n'.format(name))
f.write(str(sequence) + '\n') # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']]
process = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=open(os.devnull, 'w'), cwd=tempdir)
stdout = process.communicate()[0] # depends on [control=['try'], data=[]]
finally:
shutil.rmtree(tempdir)
# Process stdout into something downstream process can use
records = stdout.split('>')
# First line is now blank
records.pop(0)
aligned_list = []
for record in records:
lines = record.split('\n')
name = lines.pop(0)
aligned_list.append(coral.DNA(''.join(lines))) # depends on [control=['for'], data=['record']]
return aligned_list
|
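The post-processing in MAFFT is plain FASTA parsing of stdout: split on '>', drop the empty leading record, and rejoin wrapped sequence lines. The same steps in isolation, on a fabricated alignment:

stdout = '>seq0\nATG-CA\nGT\n>seq1\nATGACAGT\n'
records = stdout.split('>')
records.pop(0)  # text before the first '>' is empty
for record in records:
    lines = record.split('\n')
    name = lines.pop(0)
    print(name, ''.join(lines))
# seq0 ATG-CAGT
# seq1 ATGACAGT
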
def build(self, obj=None, queryset=None, push=True):
"""Build indexes."""
if obj is not None and queryset is not None:
raise ValueError(
"Only one of 'obj' and 'queryset' parameters can be passed to the build method."
)
if obj is not None:
if self.queryset.model != obj._meta.model: # pylint: disable=protected-access
logger.debug(
"Object type mismatch, skipping build of '%s' Elasticsearch index.",
self.__class__.__name__
)
return
if not self.queryset.filter(pk=self.get_object_id(obj)).exists():
logger.debug(
"Object not in predefined queryset, skipping build of '%s' Elasticsearch index.",
self.__class__.__name__
)
return
elif queryset is not None:
if self.queryset.model != queryset.model:
logger.debug(
"Queryset type mismatch, skipping build of '%s' Elasticsearch index.",
self.__class__.__name__
)
return
FULL_REBUILD = 'full' # pylint: disable=invalid-name
def handler(agg=None):
"""Index build handler."""
if agg == FULL_REBUILD:
queryset = self.queryset.all()
else:
queryset = self.queryset.none().union(*agg)
self._build(queryset=queryset, push=push)
def aggregator(agg=None):
"""Index build aggregator."""
if agg == FULL_REBUILD:
# A full rebuild is required, ignore any other builds.
pass
else:
if agg is None:
agg = []
if obj is not None:
# Build of a single object.
agg.append(self.queryset.filter(pk=obj.pk))
elif queryset is not None:
# Build of multiple objects.
agg.append(queryset)
else:
# Full rebuild, ignore any other builds.
agg = FULL_REBUILD
return agg
batcher = PrioritizedBatcher.global_instance()
if batcher.is_started:
batcher.add('resolwe.elastic', handler, group_by=(self._index_name, push), aggregator=aggregator)
else:
self._build(obj=obj, queryset=queryset, push=push)
|
def function[build, parameter[self, obj, queryset, push]]:
constant[Build indexes.]
if <ast.BoolOp object at 0x7da20e9b38e0> begin[:]
<ast.Raise object at 0x7da20e9b2830>
if compare[name[obj] is_not constant[None]] begin[:]
if compare[name[self].queryset.model not_equal[!=] name[obj]._meta.model] begin[:]
call[name[logger].debug, parameter[constant[Object type mismatch, skipping build of '%s' Elasticsearch index.], name[self].__class__.__name__]]
return[None]
if <ast.UnaryOp object at 0x7da20e9b1a80> begin[:]
call[name[logger].debug, parameter[constant[Object not in predefined queryset, skipping build of '%s' Elasticsearch index.], name[self].__class__.__name__]]
return[None]
variable[FULL_REBUILD] assign[=] constant[full]
def function[handler, parameter[agg]]:
constant[Index build handler.]
if compare[name[agg] equal[==] name[FULL_REBUILD]] begin[:]
variable[queryset] assign[=] call[name[self].queryset.all, parameter[]]
call[name[self]._build, parameter[]]
def function[aggregator, parameter[agg]]:
constant[Index build aggregator.]
if compare[name[agg] equal[==] name[FULL_REBUILD]] begin[:]
pass
return[name[agg]]
variable[batcher] assign[=] call[name[PrioritizedBatcher].global_instance, parameter[]]
if name[batcher].is_started begin[:]
call[name[batcher].add, parameter[constant[resolwe.elastic], name[handler]]]
|
keyword[def] identifier[build] ( identifier[self] , identifier[obj] = keyword[None] , identifier[queryset] = keyword[None] , identifier[push] = keyword[True] ):
literal[string]
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] keyword[and] identifier[queryset] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string]
)
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[queryset] . identifier[model] != identifier[obj] . identifier[_meta] . identifier[model] :
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[__class__] . identifier[__name__]
)
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[queryset] . identifier[filter] ( identifier[pk] = identifier[self] . identifier[get_object_id] ( identifier[obj] )). identifier[exists] ():
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[__class__] . identifier[__name__]
)
keyword[return]
keyword[elif] identifier[queryset] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[queryset] . identifier[model] != identifier[queryset] . identifier[model] :
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[__class__] . identifier[__name__]
)
keyword[return]
identifier[FULL_REBUILD] = literal[string]
keyword[def] identifier[handler] ( identifier[agg] = keyword[None] ):
literal[string]
keyword[if] identifier[agg] == identifier[FULL_REBUILD] :
identifier[queryset] = identifier[self] . identifier[queryset] . identifier[all] ()
keyword[else] :
identifier[queryset] = identifier[self] . identifier[queryset] . identifier[none] (). identifier[union] (* identifier[agg] )
identifier[self] . identifier[_build] ( identifier[queryset] = identifier[queryset] , identifier[push] = identifier[push] )
keyword[def] identifier[aggregator] ( identifier[agg] = keyword[None] ):
literal[string]
keyword[if] identifier[agg] == identifier[FULL_REBUILD] :
keyword[pass]
keyword[else] :
keyword[if] identifier[agg] keyword[is] keyword[None] :
identifier[agg] =[]
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] :
identifier[agg] . identifier[append] ( identifier[self] . identifier[queryset] . identifier[filter] ( identifier[pk] = identifier[obj] . identifier[pk] ))
keyword[elif] identifier[queryset] keyword[is] keyword[not] keyword[None] :
identifier[agg] . identifier[append] ( identifier[queryset] )
keyword[else] :
identifier[agg] = identifier[FULL_REBUILD]
keyword[return] identifier[agg]
identifier[batcher] = identifier[PrioritizedBatcher] . identifier[global_instance] ()
keyword[if] identifier[batcher] . identifier[is_started] :
identifier[batcher] . identifier[add] ( literal[string] , identifier[handler] , identifier[group_by] =( identifier[self] . identifier[_index_name] , identifier[push] ), identifier[aggregator] = identifier[aggregator] )
keyword[else] :
identifier[self] . identifier[_build] ( identifier[obj] = identifier[obj] , identifier[queryset] = identifier[queryset] , identifier[push] = identifier[push] )
|
def build(self, obj=None, queryset=None, push=True):
"""Build indexes."""
if obj is not None and queryset is not None:
raise ValueError("Only one of 'obj' and 'queryset' parameters can be passed to the build method.") # depends on [control=['if'], data=[]]
if obj is not None:
if self.queryset.model != obj._meta.model: # pylint: disable=protected-access
logger.debug("Object type mismatch, skipping build of '%s' Elasticsearch index.", self.__class__.__name__)
return # depends on [control=['if'], data=[]]
if not self.queryset.filter(pk=self.get_object_id(obj)).exists():
logger.debug("Object not in predefined queryset, skipping build of '%s' Elasticsearch index.", self.__class__.__name__)
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['obj']]
elif queryset is not None:
if self.queryset.model != queryset.model:
logger.debug("Queryset type mismatch, skipping build of '%s' Elasticsearch index.", self.__class__.__name__)
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['queryset']]
FULL_REBUILD = 'full' # pylint: disable=invalid-name
def handler(agg=None):
"""Index build handler."""
if agg == FULL_REBUILD:
queryset = self.queryset.all() # depends on [control=['if'], data=[]]
else:
queryset = self.queryset.none().union(*agg)
self._build(queryset=queryset, push=push)
def aggregator(agg=None):
"""Index build aggregator."""
if agg == FULL_REBUILD:
# A full rebuild is required, ignore any other builds.
pass # depends on [control=['if'], data=[]]
else:
if agg is None:
agg = [] # depends on [control=['if'], data=['agg']]
if obj is not None:
# Build of a single object.
agg.append(self.queryset.filter(pk=obj.pk)) # depends on [control=['if'], data=['obj']]
elif queryset is not None:
# Build of multiple objects.
agg.append(queryset) # depends on [control=['if'], data=['queryset']]
else:
# Full rebuild, ignore any other builds.
agg = FULL_REBUILD
return agg
batcher = PrioritizedBatcher.global_instance()
if batcher.is_started:
batcher.add('resolwe.elastic', handler, group_by=(self._index_name, push), aggregator=aggregator) # depends on [control=['if'], data=[]]
else:
self._build(obj=obj, queryset=queryset, push=push)
|
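
A minimal, self-contained sketch of the fold implemented by the aggregator/handler pair above, with plain lists standing in for the Django querysets and the PrioritizedBatcher machinery (the names here are hypothetical): partial builds accumulate, and a single full-rebuild request subsumes everything queued.

FULL_REBUILD = 'full'

def fold_build_request(agg, request):
    # request is a list of primary keys for a partial build, or None for a full rebuild
    if agg == FULL_REBUILD:
        return agg  # a pending full rebuild already covers any later request
    if agg is None:
        agg = []
    if request is None:
        return FULL_REBUILD
    agg.append(request)
    return agg

state = None
for request in ([1], [2, 3], None, [4]):
    state = fold_build_request(state, request)
print(state)  # 'full' -- the full rebuild replaced the queued partial builds
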
def disable_precompute(panel):
"""Cancel precomputation for `panel`"""
task_id = panel['data_source']['precompute']['task_id']
result = scheduler_client.cancel(task_id)
if result['status'] != 'success':
raise RuntimeError(result.get('reason'))
|
def function[disable_precompute, parameter[panel]]:
constant[Cancel precomputation for `panel`]
variable[task_id] assign[=] call[call[call[name[panel]][constant[data_source]]][constant[precompute]]][constant[task_id]]
variable[result] assign[=] call[name[scheduler_client].cancel, parameter[name[task_id]]]
if compare[call[name[result]][constant[status]] not_equal[!=] constant[success]] begin[:]
<ast.Raise object at 0x7da20c6c77c0>
|
keyword[def] identifier[disable_precompute] ( identifier[panel] ):
literal[string]
identifier[task_id] = identifier[panel] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[result] = identifier[scheduler_client] . identifier[cancel] ( identifier[task_id] )
keyword[if] identifier[result] [ literal[string] ]!= literal[string] :
keyword[raise] identifier[RuntimeError] ( identifier[result] . identifier[get] ( literal[string] ))
|
def disable_precompute(panel):
"""Cancel precomputation for `panel`"""
task_id = panel['data_source']['precompute']['task_id']
result = scheduler_client.cancel(task_id)
if result['status'] != 'success':
raise RuntimeError(result.get('reason')) # depends on [control=['if'], data=[]]
|
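
A usage sketch with a hypothetical stub in place of the module-level scheduler_client, illustrating the contract disable_precompute relies on: cancel() returns a dict carrying a 'status' key and, on failure, a 'reason'.

class StubSchedulerClient:
    def cancel(self, task_id):
        if task_id == 'task-42':  # hypothetical known task id
            return {'status': 'success'}
        return {'status': 'error', 'reason': 'unknown task: %s' % task_id}

scheduler_client = StubSchedulerClient()
panel = {'data_source': {'precompute': {'task_id': 'task-42'}}}
disable_precompute(panel)  # returns quietly; an unknown id would raise RuntimeError
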
def generate_simple_call(opcode: int, index: int):
"""
    Generates a simple call, with an index as its argument.
    :param opcode: The opcode to generate.
    :param index: The index to use as an argument.
    :return: The assembled instruction, as bytes.
"""
bs = b""
# add the opcode
bs += opcode.to_bytes(1, byteorder="little")
# Add the index
if isinstance(index, int):
if PY36:
bs += index.to_bytes(1, byteorder="little")
else:
bs += index.to_bytes(2, byteorder="little")
else:
bs += index
return bs
|
def function[generate_simple_call, parameter[opcode, index]]:
constant[
    Generates a simple call, with an index as its argument.
    :param opcode: The opcode to generate.
    :param index: The index to use as an argument.
    :return: The assembled instruction, as bytes.
]
variable[bs] assign[=] constant[b'']
<ast.AugAssign object at 0x7da1aff1e440>
if call[name[isinstance], parameter[name[index], name[int]]] begin[:]
if name[PY36] begin[:]
<ast.AugAssign object at 0x7da1aff1c520>
return[name[bs]]
|
keyword[def] identifier[generate_simple_call] ( identifier[opcode] : identifier[int] , identifier[index] : identifier[int] ):
literal[string]
identifier[bs] = literal[string]
identifier[bs] += identifier[opcode] . identifier[to_bytes] ( literal[int] , identifier[byteorder] = literal[string] )
keyword[if] identifier[isinstance] ( identifier[index] , identifier[int] ):
keyword[if] identifier[PY36] :
identifier[bs] += identifier[index] . identifier[to_bytes] ( literal[int] , identifier[byteorder] = literal[string] )
keyword[else] :
identifier[bs] += identifier[index] . identifier[to_bytes] ( literal[int] , identifier[byteorder] = literal[string] )
keyword[else] :
identifier[bs] += identifier[index]
keyword[return] identifier[bs]
|
def generate_simple_call(opcode: int, index: int):
"""
    Generates a simple call, with an index as its argument.
    :param opcode: The opcode to generate.
    :param index: The index to use as an argument.
    :return: The assembled instruction, as bytes.
"""
bs = b''
# add the opcode
bs += opcode.to_bytes(1, byteorder='little')
# Add the index
if isinstance(index, int):
if PY36:
bs += index.to_bytes(1, byteorder='little') # depends on [control=['if'], data=[]]
else:
bs += index.to_bytes(2, byteorder='little') # depends on [control=['if'], data=[]]
else:
bs += index
return bs
|
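
A usage sketch, assuming it runs in the same namespace as generate_simple_call so the PY36 flag the function consults can be defined alongside it; dis.opmap supplies a real opcode number so nothing is hard-coded.

import dis
import sys

PY36 = sys.version_info >= (3, 6)  # the flag the function reads

# assemble a LOAD_FAST instruction referencing local slot 2
instr = generate_simple_call(dis.opmap['LOAD_FAST'], 2)
print(instr.hex())  # two bytes on 3.6+ wordcode, three bytes before
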
def Reset(self):
' Reset Axis and set default parameters for H-bridge '
spi.SPI_write(self.CS, [0xC0, 0x60]) # reset
        # spi.SPI_write(self.CS, [0x14, 0x14]) # Stall Threshold setup
        # spi.SPI_write(self.CS, [0xFF, 0xFF])
        # spi.SPI_write(self.CS, [0x13, 0x13]) # Over Current Threshold setup
# spi.SPI_write(self.CS, [0xFF, 0xFF])
spi.SPI_write(self.CS, [0x15, 0xFF]) # Full Step speed
spi.SPI_write(self.CS, [0xFF, 0xFF])
spi.SPI_write(self.CS, [0xFF, 0xFF])
spi.SPI_write(self.CS, [0x05, 0x05]) # ACC
spi.SPI_write(self.CS, [0x01, 0x01])
spi.SPI_write(self.CS, [0xF5, 0xF5])
spi.SPI_write(self.CS, [0x06, 0x06]) # DEC
spi.SPI_write(self.CS, [0x01, 0x01])
spi.SPI_write(self.CS, [0xF5, 0xF5])
spi.SPI_write(self.CS, [0x0A, 0x0A]) # KVAL_RUN
spi.SPI_write(self.CS, [0x10, 0x10])
spi.SPI_write(self.CS, [0x0B, 0x0B]) # KVAL_ACC
spi.SPI_write(self.CS, [0x20, 0x20])
spi.SPI_write(self.CS, [0x0C, 0x0C]) # KVAL_DEC
spi.SPI_write(self.CS, [0x20, 0x20])
spi.SPI_write(self.CS, [0x18, 0x18]) # CONFIG
spi.SPI_write(self.CS, [0b00111000, 0b00111000])
spi.SPI_write(self.CS, [0b00000000, 0b00000000])
|
def function[Reset, parameter[self]]:
constant[ Reset Axis and set default parameters for H-bridge ]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204567e80>, <ast.Constant object at 0x7da204566020>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204565000>, <ast.Constant object at 0x7da2045676d0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045659c0>, <ast.Constant object at 0x7da204565ab0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204566ef0>, <ast.Constant object at 0x7da204567a30>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045660e0>, <ast.Constant object at 0x7da204567c40>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204566b60>, <ast.Constant object at 0x7da204564c70>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045643d0>, <ast.Constant object at 0x7da204565de0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204564df0>, <ast.Constant object at 0x7da204567eb0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045655a0>, <ast.Constant object at 0x7da204567ca0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204566b90>, <ast.Constant object at 0x7da204567910>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204566e00>, <ast.Constant object at 0x7da204567a90>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204564790>, <ast.Constant object at 0x7da204567550>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204565060>, <ast.Constant object at 0x7da204564a00>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204565600>, <ast.Constant object at 0x7da204566ad0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204565e70>, <ast.Constant object at 0x7da204566e60>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da204564e80>, <ast.Constant object at 0x7da204567220>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045676a0>, <ast.Constant object at 0x7da204564fd0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045644c0>, <ast.Constant object at 0x7da2045667a0>]]]]
call[name[spi].SPI_write, parameter[name[self].CS, list[[<ast.Constant object at 0x7da2045663e0>, <ast.Constant object at 0x7da204564160>]]]]
|
keyword[def] identifier[Reset] ( identifier[self] ):
literal[string]
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
identifier[spi] . identifier[SPI_write] ( identifier[self] . identifier[CS] ,[ literal[int] , literal[int] ])
|
def Reset(self):
""" Reset Axis and set default parameters for H-bridge """
spi.SPI_write(self.CS, [192, 96]) # reset
    # spi.SPI_write(self.CS, [0x14, 0x14]) # Stall Threshold setup
    # spi.SPI_write(self.CS, [0xFF, 0xFF])
    # spi.SPI_write(self.CS, [0x13, 0x13]) # Over Current Threshold setup
# spi.SPI_write(self.CS, [0xFF, 0xFF])
spi.SPI_write(self.CS, [21, 255]) # Full Step speed
spi.SPI_write(self.CS, [255, 255])
spi.SPI_write(self.CS, [255, 255])
spi.SPI_write(self.CS, [5, 5]) # ACC
spi.SPI_write(self.CS, [1, 1])
spi.SPI_write(self.CS, [245, 245])
spi.SPI_write(self.CS, [6, 6]) # DEC
spi.SPI_write(self.CS, [1, 1])
spi.SPI_write(self.CS, [245, 245])
spi.SPI_write(self.CS, [10, 10]) # KVAL_RUN
spi.SPI_write(self.CS, [16, 16])
spi.SPI_write(self.CS, [11, 11]) # KVAL_ACC
spi.SPI_write(self.CS, [32, 32])
spi.SPI_write(self.CS, [12, 12]) # KVAL_DEC
spi.SPI_write(self.CS, [32, 32])
spi.SPI_write(self.CS, [24, 24]) # CONFIG
spi.SPI_write(self.CS, [56, 56])
spi.SPI_write(self.CS, [0, 0])
|
def draw_hsv(mag, ang, dtype=uint8, fn=None):
"""
    mag must be uint8, uint16, or uint32, and 2-D
ang is in radians (float)
"""
assert mag.shape == ang.shape
assert mag.ndim == 2
maxval = iinfo(dtype).max
hsv = dstack(((degrees(ang)/2).astype(dtype), # /2 to keep less than 255
ones_like(mag)*maxval, # maxval must be after in 1-D case
                  cv2.normalize(mag, None, alpha=0, beta=maxval, norm_type=cv2.NORM_MINMAX)))
rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB)
if fn is not None:
print('writing ' + fn)
cv2.imwrite(fn, rgb)
return rgb
|
def function[draw_hsv, parameter[mag, ang, dtype, fn]]:
constant[
    mag must be uint8, uint16, or uint32, and 2-D
ang is in radians (float)
]
assert[compare[name[mag].shape equal[==] name[ang].shape]]
assert[compare[name[mag].ndim equal[==] constant[2]]]
variable[maxval] assign[=] call[name[iinfo], parameter[name[dtype]]].max
variable[hsv] assign[=] call[name[dstack], parameter[tuple[[<ast.Call object at 0x7da1b0f5ac50>, <ast.BinOp object at 0x7da1b0f5bd90>, <ast.Call object at 0x7da1b0f5a680>]]]]
variable[rgb] assign[=] call[name[cv2].cvtColor, parameter[name[hsv], name[cv2].COLOR_HSV2RGB]]
if compare[name[fn] is_not constant[None]] begin[:]
call[name[print], parameter[binary_operation[constant[writing ] + name[fn]]]]
call[name[cv2].imwrite, parameter[name[fn], name[rgb]]]
return[name[rgb]]
|
keyword[def] identifier[draw_hsv] ( identifier[mag] , identifier[ang] , identifier[dtype] = identifier[uint8] , identifier[fn] = keyword[None] ):
literal[string]
keyword[assert] identifier[mag] . identifier[shape] == identifier[ang] . identifier[shape]
keyword[assert] identifier[mag] . identifier[ndim] == literal[int]
identifier[maxval] = identifier[iinfo] ( identifier[dtype] ). identifier[max]
identifier[hsv] = identifier[dstack] ((( identifier[degrees] ( identifier[ang] )/ literal[int] ). identifier[astype] ( identifier[dtype] ),
identifier[ones_like] ( identifier[mag] )* identifier[maxval] ,
    identifier[cv2] . identifier[normalize] ( identifier[mag] , keyword[None] , identifier[alpha] = literal[int] , identifier[beta] = identifier[maxval] , identifier[norm_type] = identifier[cv2] . identifier[NORM_MINMAX] )))
identifier[rgb] = identifier[cv2] . identifier[cvtColor] ( identifier[hsv] , identifier[cv2] . identifier[COLOR_HSV2RGB] )
keyword[if] identifier[fn] keyword[is] keyword[not] keyword[None] :
identifier[print] ( literal[string] + identifier[fn] )
identifier[cv2] . identifier[imwrite] ( identifier[fn] , identifier[rgb] )
keyword[return] identifier[rgb]
|
def draw_hsv(mag, ang, dtype=uint8, fn=None):
"""
    mag must be uint8, uint16, or uint32, and 2-D
ang is in radians (float)
"""
assert mag.shape == ang.shape
assert mag.ndim == 2
maxval = iinfo(dtype).max # /2 to keep less than 255
# maxval must be after in 1-D case
    hsv = dstack(((degrees(ang) / 2).astype(dtype), ones_like(mag) * maxval, cv2.normalize(mag, None, alpha=0, beta=maxval, norm_type=cv2.NORM_MINMAX)))
rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB)
if fn is not None:
print('writing ' + fn)
cv2.imwrite(fn, rgb) # depends on [control=['if'], data=['fn']]
return rgb
|
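
A usage sketch on a synthetic field, assuming the function's module already holds the numpy names and cv2 it uses; dividing degrees by two keeps hue inside OpenCV's 0-180 range for 8-bit images.

import numpy as np

# synthetic flow: magnitude ramps left to right, angle sweeps a full turn
mag = np.tile(np.linspace(0, 255, 64), (64, 1)).astype(np.uint8)
ang = np.tile(np.linspace(0, 2 * np.pi, 64), (64, 1))
rgb = draw_hsv(mag, ang, dtype=np.uint8)
print(rgb.shape, rgb.dtype)  # (64, 64, 3) uint8
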
def compute_triangle_circumcenters(X, ei_dot_ei, ei_dot_ej):
"""Computes the circumcenters of all given triangles.
"""
    # The input arguments are the dot products
#
# <e1, e2>
# <e2, e0>
# <e0, e1>
#
# of the edges
#
# e0: x1->x2,
# e1: x2->x0,
# e2: x0->x1.
#
# Note that edge e_i is opposite of node i and the edges add up to 0.
# The trilinear coordinates of the circumcenter are
#
# cos(alpha0) : cos(alpha1) : cos(alpha2)
#
# where alpha_k is the angle at point k, opposite of edge k. The Cartesian
# coordinates are (see
# <https://en.wikipedia.org/wiki/Trilinear_coordinates#Between_Cartesian_and_trilinear_coordinates>)
#
# C = sum_i ||e_i|| * cos(alpha_i)/beta * P_i
#
# with
#
# beta = sum ||e_i||*cos(alpha_i)
#
# Incidentally, the cosines are
#
# cos(alpha0) = <e1, e2> / ||e1|| / ||e2||,
#
# so in total
#
# C = <e_0, e0> <e1, e2> / sum_i (<e_i, e_i> <e{i+1}, e{i+2}>) P0
# + ... P1
# + ... P2.
#
# An even nicer formula is given on
# <https://en.wikipedia.org/wiki/Circumscribed_circle#Barycentric_coordinates>: The
# barycentric coordinates of the circumcenter are
#
# a^2 (b^2 + c^2 - a^2) : b^2 (c^2 + a^2 - b^2) : c^2 (a^2 + b^2 - c^2).
#
# This is only using the squared edge lengths, too!
#
alpha = ei_dot_ei * ei_dot_ej
alpha_sum = alpha[0] + alpha[1] + alpha[2]
beta = alpha / alpha_sum[None]
a = X * beta[..., None]
cc = a[0] + a[1] + a[2]
# alpha = numpy.array([
# ei_dot_ei[0] * (ei_dot_ei[1] + ei_dot_ei[2] - ei_dot_ei[0]),
# ei_dot_ei[1] * (ei_dot_ei[2] + ei_dot_ei[0] - ei_dot_ei[1]),
# ei_dot_ei[2] * (ei_dot_ei[0] + ei_dot_ei[1] - ei_dot_ei[2]),
# ])
# alpha /= numpy.sum(alpha, axis=0)
# cc = (X[0].T * alpha[0] + X[1].T * alpha[1] + X[2].T * alpha[2]).T
return cc
|
def function[compute_triangle_circumcenters, parameter[X, ei_dot_ei, ei_dot_ej]]:
constant[Computes the circumcenters of all given triangles.
]
variable[alpha] assign[=] binary_operation[name[ei_dot_ei] * name[ei_dot_ej]]
variable[alpha_sum] assign[=] binary_operation[binary_operation[call[name[alpha]][constant[0]] + call[name[alpha]][constant[1]]] + call[name[alpha]][constant[2]]]
variable[beta] assign[=] binary_operation[name[alpha] / call[name[alpha_sum]][constant[None]]]
variable[a] assign[=] binary_operation[name[X] * call[name[beta]][tuple[[<ast.Constant object at 0x7da20c76c0a0>, <ast.Constant object at 0x7da20c76fe50>]]]]
variable[cc] assign[=] binary_operation[binary_operation[call[name[a]][constant[0]] + call[name[a]][constant[1]]] + call[name[a]][constant[2]]]
return[name[cc]]
|
keyword[def] identifier[compute_triangle_circumcenters] ( identifier[X] , identifier[ei_dot_ei] , identifier[ei_dot_ej] ):
literal[string]
identifier[alpha] = identifier[ei_dot_ei] * identifier[ei_dot_ej]
identifier[alpha_sum] = identifier[alpha] [ literal[int] ]+ identifier[alpha] [ literal[int] ]+ identifier[alpha] [ literal[int] ]
identifier[beta] = identifier[alpha] / identifier[alpha_sum] [ keyword[None] ]
identifier[a] = identifier[X] * identifier[beta] [..., keyword[None] ]
identifier[cc] = identifier[a] [ literal[int] ]+ identifier[a] [ literal[int] ]+ identifier[a] [ literal[int] ]
keyword[return] identifier[cc]
|
def compute_triangle_circumcenters(X, ei_dot_ei, ei_dot_ej):
"""Computes the circumcenters of all given triangles.
"""
    # The input arguments are the dot products
#
# <e1, e2>
# <e2, e0>
# <e0, e1>
#
# of the edges
#
# e0: x1->x2,
# e1: x2->x0,
# e2: x0->x1.
#
# Note that edge e_i is opposite of node i and the edges add up to 0.
# The trilinear coordinates of the circumcenter are
#
# cos(alpha0) : cos(alpha1) : cos(alpha2)
#
# where alpha_k is the angle at point k, opposite of edge k. The Cartesian
# coordinates are (see
# <https://en.wikipedia.org/wiki/Trilinear_coordinates#Between_Cartesian_and_trilinear_coordinates>)
#
# C = sum_i ||e_i|| * cos(alpha_i)/beta * P_i
#
# with
#
# beta = sum ||e_i||*cos(alpha_i)
#
# Incidentally, the cosines are
#
# cos(alpha0) = <e1, e2> / ||e1|| / ||e2||,
#
# so in total
#
# C = <e_0, e0> <e1, e2> / sum_i (<e_i, e_i> <e{i+1}, e{i+2}>) P0
# + ... P1
# + ... P2.
#
# An even nicer formula is given on
# <https://en.wikipedia.org/wiki/Circumscribed_circle#Barycentric_coordinates>: The
# barycentric coordinates of the circumcenter are
#
# a^2 (b^2 + c^2 - a^2) : b^2 (c^2 + a^2 - b^2) : c^2 (a^2 + b^2 - c^2).
#
# This is only using the squared edge lengths, too!
#
alpha = ei_dot_ei * ei_dot_ej
alpha_sum = alpha[0] + alpha[1] + alpha[2]
beta = alpha / alpha_sum[None]
a = X * beta[..., None]
cc = a[0] + a[1] + a[2]
# alpha = numpy.array([
# ei_dot_ei[0] * (ei_dot_ei[1] + ei_dot_ei[2] - ei_dot_ei[0]),
# ei_dot_ei[1] * (ei_dot_ei[2] + ei_dot_ei[0] - ei_dot_ei[1]),
# ei_dot_ei[2] * (ei_dot_ei[0] + ei_dot_ei[1] - ei_dot_ei[2]),
# ])
# alpha /= numpy.sum(alpha, axis=0)
# cc = (X[0].T * alpha[0] + X[1].T * alpha[1] + X[2].T * alpha[2]).T
return cc
|
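
A quick numerical check of the barycentric formula, assuming numpy and that X is stacked as (3, n_triangles, dim) with X[i] holding vertex i of every triangle: for a right triangle the circumcenter must land on the midpoint of the hypotenuse.

import numpy as np

# one right triangle P0=(0,0), P1=(4,0), P2=(0,3); expected circumcenter (2, 1.5)
X = np.array([[[0.0, 0.0]], [[4.0, 0.0]], [[0.0, 3.0]]])  # shape (3, 1, 2)
e0 = X[2] - X[1]  # x1 -> x2
e1 = X[0] - X[2]  # x2 -> x0
e2 = X[1] - X[0]  # x0 -> x1
ei_dot_ei = np.array([(e0 * e0).sum(-1), (e1 * e1).sum(-1), (e2 * e2).sum(-1)])
ei_dot_ej = np.array([(e1 * e2).sum(-1), (e2 * e0).sum(-1), (e0 * e1).sum(-1)])
print(compute_triangle_circumcenters(X, ei_dot_ei, ei_dot_ej))  # [[2.  1.5]]
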
def draw_group_labels(self):
"""
Renders group labels to the figure.
"""
for i, label in enumerate(self.groups):
label_x = self.group_label_coords["x"][i]
label_y = self.group_label_coords["y"][i]
label_ha = self.group_label_aligns["has"][i]
label_va = self.group_label_aligns["vas"][i]
color = self.group_label_color[i]
self.ax.text(
s=label,
x=label_x,
y=label_y,
ha=label_ha,
va=label_va,
color=color,
fontsize=self.fontsize,
family=self.fontfamily,
)
|
def function[draw_group_labels, parameter[self]]:
constant[
Renders group labels to the figure.
]
for taget[tuple[[<ast.Name object at 0x7da1b1d9ab30>, <ast.Name object at 0x7da1b1d9a620>]]] in starred[call[name[enumerate], parameter[name[self].groups]]] begin[:]
variable[label_x] assign[=] call[call[name[self].group_label_coords][constant[x]]][name[i]]
variable[label_y] assign[=] call[call[name[self].group_label_coords][constant[y]]][name[i]]
variable[label_ha] assign[=] call[call[name[self].group_label_aligns][constant[has]]][name[i]]
variable[label_va] assign[=] call[call[name[self].group_label_aligns][constant[vas]]][name[i]]
variable[color] assign[=] call[name[self].group_label_color][name[i]]
call[name[self].ax.text, parameter[]]
|
keyword[def] identifier[draw_group_labels] ( identifier[self] ):
literal[string]
keyword[for] identifier[i] , identifier[label] keyword[in] identifier[enumerate] ( identifier[self] . identifier[groups] ):
identifier[label_x] = identifier[self] . identifier[group_label_coords] [ literal[string] ][ identifier[i] ]
identifier[label_y] = identifier[self] . identifier[group_label_coords] [ literal[string] ][ identifier[i] ]
identifier[label_ha] = identifier[self] . identifier[group_label_aligns] [ literal[string] ][ identifier[i] ]
identifier[label_va] = identifier[self] . identifier[group_label_aligns] [ literal[string] ][ identifier[i] ]
identifier[color] = identifier[self] . identifier[group_label_color] [ identifier[i] ]
identifier[self] . identifier[ax] . identifier[text] (
identifier[s] = identifier[label] ,
identifier[x] = identifier[label_x] ,
identifier[y] = identifier[label_y] ,
identifier[ha] = identifier[label_ha] ,
identifier[va] = identifier[label_va] ,
identifier[color] = identifier[color] ,
identifier[fontsize] = identifier[self] . identifier[fontsize] ,
identifier[family] = identifier[self] . identifier[fontfamily] ,
)
|
def draw_group_labels(self):
"""
Renders group labels to the figure.
"""
for (i, label) in enumerate(self.groups):
label_x = self.group_label_coords['x'][i]
label_y = self.group_label_coords['y'][i]
label_ha = self.group_label_aligns['has'][i]
label_va = self.group_label_aligns['vas'][i]
color = self.group_label_color[i]
self.ax.text(s=label, x=label_x, y=label_y, ha=label_ha, va=label_va, color=color, fontsize=self.fontsize, family=self.fontfamily) # depends on [control=['for'], data=[]]
|
def inasafe_place_value_coefficient(number, feature, parent):
"""Given a number, it will return the coefficient of the place value name.
For instance:
* inasafe_place_value_coefficient(10) -> 1
* inasafe_place_value_coefficient(1700) -> 1.7
It needs to be used with inasafe_number_denomination_unit.
"""
_ = feature, parent # NOQA
if number >= 0:
rounded_number = round_affected_number(
number,
use_rounding=True,
use_population_rounding=True
)
min_number = 1000
value, unit = denomination(rounded_number, min_number)
if number < min_number:
rounded_number = int(round(value, 1))
else:
rounded_number = round(value, 1)
return str(rounded_number)
else:
return None
|
def function[inasafe_place_value_coefficient, parameter[number, feature, parent]]:
constant[Given a number, it will return the coefficient of the place value name.
For instance:
* inasafe_place_value_coefficient(10) -> 1
* inasafe_place_value_coefficient(1700) -> 1.7
It needs to be used with inasafe_number_denomination_unit.
]
variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0c0f1f0>, <ast.Name object at 0x7da1b0c0d600>]]
if compare[name[number] greater_or_equal[>=] constant[0]] begin[:]
variable[rounded_number] assign[=] call[name[round_affected_number], parameter[name[number]]]
variable[min_number] assign[=] constant[1000]
<ast.Tuple object at 0x7da1b0c0c4c0> assign[=] call[name[denomination], parameter[name[rounded_number], name[min_number]]]
if compare[name[number] less[<] name[min_number]] begin[:]
variable[rounded_number] assign[=] call[name[int], parameter[call[name[round], parameter[name[value], constant[1]]]]]
return[call[name[str], parameter[name[rounded_number]]]]
|
keyword[def] identifier[inasafe_place_value_coefficient] ( identifier[number] , identifier[feature] , identifier[parent] ):
literal[string]
identifier[_] = identifier[feature] , identifier[parent]
keyword[if] identifier[number] >= literal[int] :
identifier[rounded_number] = identifier[round_affected_number] (
identifier[number] ,
identifier[use_rounding] = keyword[True] ,
identifier[use_population_rounding] = keyword[True]
)
identifier[min_number] = literal[int]
identifier[value] , identifier[unit] = identifier[denomination] ( identifier[rounded_number] , identifier[min_number] )
keyword[if] identifier[number] < identifier[min_number] :
identifier[rounded_number] = identifier[int] ( identifier[round] ( identifier[value] , literal[int] ))
keyword[else] :
identifier[rounded_number] = identifier[round] ( identifier[value] , literal[int] )
keyword[return] identifier[str] ( identifier[rounded_number] )
keyword[else] :
keyword[return] keyword[None]
|
def inasafe_place_value_coefficient(number, feature, parent):
"""Given a number, it will return the coefficient of the place value name.
For instance:
* inasafe_place_value_coefficient(10) -> 1
* inasafe_place_value_coefficient(1700) -> 1.7
It needs to be used with inasafe_number_denomination_unit.
"""
_ = (feature, parent) # NOQA
if number >= 0:
rounded_number = round_affected_number(number, use_rounding=True, use_population_rounding=True)
min_number = 1000
(value, unit) = denomination(rounded_number, min_number)
if number < min_number:
rounded_number = int(round(value, 1)) # depends on [control=['if'], data=[]]
else:
rounded_number = round(value, 1)
return str(rounded_number) # depends on [control=['if'], data=['number']]
else:
return None
|
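
The coefficient/unit split above leans on InaSAFE's round_affected_number and denomination helpers; a simplified stand-in for denomination (the real one also handles localized unit names) makes the intent concrete.

def denomination(value, min_number=1000):
    # simplified stand-in: peel off factors of 1000 until below min_number
    units = ['', 'thousand', 'million', 'billion']
    i = 0
    while value >= min_number and i < len(units) - 1:
        value /= 1000.0
        i += 1
    return value, units[i]

for n in (10, 1700, 2500000):
    value, unit = denomination(n)
    print(round(value, 1), unit)  # 10 / 1.7 thousand / 2.5 million
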
def _get_public_key_count(self):
"""Return the count of public keys in the list and embedded."""
index = len(self._public_keys)
for authentication in self._authentications:
if authentication.is_public_key():
index += 1
return index
|
def function[_get_public_key_count, parameter[self]]:
constant[Return the count of public keys in the list and embedded.]
variable[index] assign[=] call[name[len], parameter[name[self]._public_keys]]
for taget[name[authentication]] in starred[name[self]._authentications] begin[:]
if call[name[authentication].is_public_key, parameter[]] begin[:]
<ast.AugAssign object at 0x7da18f09d090>
return[name[index]]
|
keyword[def] identifier[_get_public_key_count] ( identifier[self] ):
literal[string]
identifier[index] = identifier[len] ( identifier[self] . identifier[_public_keys] )
keyword[for] identifier[authentication] keyword[in] identifier[self] . identifier[_authentications] :
keyword[if] identifier[authentication] . identifier[is_public_key] ():
identifier[index] += literal[int]
keyword[return] identifier[index]
|
def _get_public_key_count(self):
"""Return the count of public keys in the list and embedded."""
index = len(self._public_keys)
for authentication in self._authentications:
if authentication.is_public_key():
index += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['authentication']]
return index
|
async def copy_from_query(self, query, *args, output,
timeout=None, format=None, oids=None,
delimiter=None, null=None, header=None,
quote=None, escape=None, force_quote=None,
encoding=None):
"""Copy the results of a query to a file or file-like object.
:param str query:
The query to copy the results of.
:param args:
Query arguments.
:param output:
A :term:`path-like object <python:path-like object>`,
or a :term:`file-like object <python:file-like object>`, or
a :term:`coroutine function <python:coroutine function>`
that takes a ``bytes`` instance as a sole argument.
:param float timeout:
Optional timeout value in seconds.
The remaining keyword arguments are ``COPY`` statement options,
see `COPY statement documentation`_ for details.
:return: The status string of the COPY command.
Example:
.. code-block:: pycon
>>> import asyncpg
>>> import asyncio
>>> async def run():
... con = await asyncpg.connect(user='postgres')
... result = await con.copy_from_query(
... 'SELECT foo, bar FROM mytable WHERE foo > $1', 10,
... output='file.csv', format='csv')
... print(result)
...
>>> asyncio.get_event_loop().run_until_complete(run())
'COPY 10'
.. _`COPY statement documentation`:
https://www.postgresql.org/docs/current/static/sql-copy.html
.. versionadded:: 0.11.0
"""
opts = self._format_copy_opts(
format=format, oids=oids, delimiter=delimiter,
null=null, header=header, quote=quote, escape=escape,
force_quote=force_quote, encoding=encoding
)
if args:
query = await utils._mogrify(self, query, args)
copy_stmt = 'COPY ({query}) TO STDOUT {opts}'.format(
query=query, opts=opts)
return await self._copy_out(copy_stmt, output, timeout)
|
<ast.AsyncFunctionDef object at 0x7da1b1956ef0>
|
keyword[async] keyword[def] identifier[copy_from_query] ( identifier[self] , identifier[query] ,* identifier[args] , identifier[output] ,
identifier[timeout] = keyword[None] , identifier[format] = keyword[None] , identifier[oids] = keyword[None] ,
identifier[delimiter] = keyword[None] , identifier[null] = keyword[None] , identifier[header] = keyword[None] ,
identifier[quote] = keyword[None] , identifier[escape] = keyword[None] , identifier[force_quote] = keyword[None] ,
identifier[encoding] = keyword[None] ):
literal[string]
identifier[opts] = identifier[self] . identifier[_format_copy_opts] (
identifier[format] = identifier[format] , identifier[oids] = identifier[oids] , identifier[delimiter] = identifier[delimiter] ,
identifier[null] = identifier[null] , identifier[header] = identifier[header] , identifier[quote] = identifier[quote] , identifier[escape] = identifier[escape] ,
identifier[force_quote] = identifier[force_quote] , identifier[encoding] = identifier[encoding]
)
keyword[if] identifier[args] :
identifier[query] = keyword[await] identifier[utils] . identifier[_mogrify] ( identifier[self] , identifier[query] , identifier[args] )
identifier[copy_stmt] = literal[string] . identifier[format] (
identifier[query] = identifier[query] , identifier[opts] = identifier[opts] )
keyword[return] keyword[await] identifier[self] . identifier[_copy_out] ( identifier[copy_stmt] , identifier[output] , identifier[timeout] )
|
async def copy_from_query(self, query, *args, output, timeout=None, format=None, oids=None, delimiter=None, null=None, header=None, quote=None, escape=None, force_quote=None, encoding=None):
"""Copy the results of a query to a file or file-like object.
:param str query:
The query to copy the results of.
:param args:
Query arguments.
:param output:
A :term:`path-like object <python:path-like object>`,
or a :term:`file-like object <python:file-like object>`, or
a :term:`coroutine function <python:coroutine function>`
that takes a ``bytes`` instance as a sole argument.
:param float timeout:
Optional timeout value in seconds.
The remaining keyword arguments are ``COPY`` statement options,
see `COPY statement documentation`_ for details.
:return: The status string of the COPY command.
Example:
.. code-block:: pycon
>>> import asyncpg
>>> import asyncio
>>> async def run():
... con = await asyncpg.connect(user='postgres')
... result = await con.copy_from_query(
... 'SELECT foo, bar FROM mytable WHERE foo > $1', 10,
... output='file.csv', format='csv')
... print(result)
...
>>> asyncio.get_event_loop().run_until_complete(run())
'COPY 10'
.. _`COPY statement documentation`:
https://www.postgresql.org/docs/current/static/sql-copy.html
.. versionadded:: 0.11.0
"""
opts = self._format_copy_opts(format=format, oids=oids, delimiter=delimiter, null=null, header=header, quote=quote, escape=escape, force_quote=force_quote, encoding=encoding)
if args:
query = await utils._mogrify(self, query, args) # depends on [control=['if'], data=[]]
copy_stmt = 'COPY ({query}) TO STDOUT {opts}'.format(query=query, opts=opts)
return await self._copy_out(copy_stmt, output, timeout)
|
def get_user_id(remote, email):
"""Get the Globus identity for a users given email.
A Globus ID is a UUID that can uniquely identify a Globus user. See the
docs here for v2/api/identities
https://docs.globus.org/api/auth/reference/
"""
try:
url = '{}?usernames={}'.format(GLOBUS_USER_ID_URL, email)
user_id = get_dict_from_response(remote.get(url))
return user_id['identities'][0]['id']
except KeyError:
# If we got here the response was successful but the data was invalid.
# It's likely the URL is wrong but possible the API has changed.
raise OAuthResponseError('Failed to fetch user id, likely server '
'mis-configuration', None, remote)
|
def function[get_user_id, parameter[remote, email]]:
    constant[Get the Globus identity for a user given their email.
A Globus ID is a UUID that can uniquely identify a Globus user. See the
docs here for v2/api/identities
https://docs.globus.org/api/auth/reference/
]
<ast.Try object at 0x7da18dc986a0>
|
keyword[def] identifier[get_user_id] ( identifier[remote] , identifier[email] ):
literal[string]
keyword[try] :
identifier[url] = literal[string] . identifier[format] ( identifier[GLOBUS_USER_ID_URL] , identifier[email] )
identifier[user_id] = identifier[get_dict_from_response] ( identifier[remote] . identifier[get] ( identifier[url] ))
keyword[return] identifier[user_id] [ literal[string] ][ literal[int] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[OAuthResponseError] ( literal[string]
literal[string] , keyword[None] , identifier[remote] )
|
def get_user_id(remote, email):
"""Get the Globus identity for a users given email.
A Globus ID is a UUID that can uniquely identify a Globus user. See the
docs here for v2/api/identities
https://docs.globus.org/api/auth/reference/
"""
try:
url = '{}?usernames={}'.format(GLOBUS_USER_ID_URL, email)
user_id = get_dict_from_response(remote.get(url))
return user_id['identities'][0]['id'] # depends on [control=['try'], data=[]]
except KeyError:
# If we got here the response was successful but the data was invalid.
# It's likely the URL is wrong but possible the API has changed.
raise OAuthResponseError('Failed to fetch user id, likely server mis-configuration', None, remote) # depends on [control=['except'], data=[]]
|
def server_poweroff(host=None,
admin_username=None,
admin_password=None,
module=None):
'''
Powers down the managed server.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
module
The element to power off on the chassis such as a blade.
If not provided, the chassis will be powered off.
CLI Example:
.. code-block:: bash
salt dell dracr.server_poweroff
salt dell dracr.server_poweroff module=server-1
'''
return __execute_cmd('serveraction powerdown',
host=host, admin_username=admin_username,
admin_password=admin_password, module=module)
|
def function[server_poweroff, parameter[host, admin_username, admin_password, module]]:
constant[
Powers down the managed server.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
module
The element to power off on the chassis such as a blade.
If not provided, the chassis will be powered off.
CLI Example:
.. code-block:: bash
salt dell dracr.server_poweroff
salt dell dracr.server_poweroff module=server-1
]
return[call[name[__execute_cmd], parameter[constant[serveraction powerdown]]]]
|
keyword[def] identifier[server_poweroff] ( identifier[host] = keyword[None] ,
identifier[admin_username] = keyword[None] ,
identifier[admin_password] = keyword[None] ,
identifier[module] = keyword[None] ):
literal[string]
keyword[return] identifier[__execute_cmd] ( literal[string] ,
identifier[host] = identifier[host] , identifier[admin_username] = identifier[admin_username] ,
identifier[admin_password] = identifier[admin_password] , identifier[module] = identifier[module] )
|
def server_poweroff(host=None, admin_username=None, admin_password=None, module=None):
"""
Powers down the managed server.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
module
The element to power off on the chassis such as a blade.
If not provided, the chassis will be powered off.
CLI Example:
.. code-block:: bash
salt dell dracr.server_poweroff
salt dell dracr.server_poweroff module=server-1
"""
return __execute_cmd('serveraction powerdown', host=host, admin_username=admin_username, admin_password=admin_password, module=module)
|
def hmac_md5(s, salt):
"""
    Compute the HMAC-MD5 digest of a string, keyed with the given salt.
    :param:
        * s: (string) the string to hash
        * salt: (string) a random key string
    :return:
        * result: (string) the 32-character lowercase MD5 hex digest
"""
hmac_md5 = hmac.new(salt.encode('utf-8'), s.encode('utf-8'),
digestmod=hashlib.md5).hexdigest()
return hmac_md5
|
def function[hmac_md5, parameter[s, salt]]:
constant[
    Compute the HMAC-MD5 digest of a string, keyed with the given salt.
    :param:
        * s: (string) the string to hash
        * salt: (string) a random key string
    :return:
        * result: (string) the 32-character lowercase MD5 hex digest
]
variable[hmac_md5] assign[=] call[call[name[hmac].new, parameter[call[name[salt].encode, parameter[constant[utf-8]]], call[name[s].encode, parameter[constant[utf-8]]]]].hexdigest, parameter[]]
return[name[hmac_md5]]
|
keyword[def] identifier[hmac_md5] ( identifier[s] , identifier[salt] ):
literal[string]
identifier[hmac_md5] = identifier[hmac] . identifier[new] ( identifier[salt] . identifier[encode] ( literal[string] ), identifier[s] . identifier[encode] ( literal[string] ),
identifier[digestmod] = identifier[hashlib] . identifier[md5] ). identifier[hexdigest] ()
keyword[return] identifier[hmac_md5]
|
def hmac_md5(s, salt):
"""
    Compute the HMAC-MD5 digest of a string, keyed with the given salt.
    :param:
        * s: (string) the string to hash
        * salt: (string) a random key string
    :return:
        * result: (string) the 32-character lowercase MD5 hex digest
"""
hmac_md5 = hmac.new(salt.encode('utf-8'), s.encode('utf-8'), digestmod=hashlib.md5).hexdigest()
return hmac_md5
|
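
A quick check against the widely quoted HMAC-MD5 test vector (key "key", message "The quick brown fox jumps over the lazy dog"), assuming the function's module imports hmac and hashlib as its body requires.

digest = hmac_md5('The quick brown fox jumps over the lazy dog', 'key')
print(digest)  # expected: 80070713463e7749b90c2dc24911e275
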
def RestoreTaskStoreFactory(store_class, chunk_size, restore_file, save_file):
"""
Restores a task store from file.
"""
intm_results = np.load(restore_file)
intm = intm_results[intm_results.files[0]]
idx = np.isnan(intm).flatten().nonzero()[0]
partitions = math.ceil(len(idx) / float(chunk_size))
task_store = store_class(partitions, idx.tolist(), save_file)
task_store.num_tasks = len(idx)
# Also set up matrices for saving results
for f in intm_results.files:
task_store.__dict__[f] = intm_results[f]
return task_store
|
def function[RestoreTaskStoreFactory, parameter[store_class, chunk_size, restore_file, save_file]]:
constant[
Restores a task store from file.
]
variable[intm_results] assign[=] call[name[np].load, parameter[name[restore_file]]]
variable[intm] assign[=] call[name[intm_results]][call[name[intm_results].files][constant[0]]]
variable[idx] assign[=] call[call[call[call[name[np].isnan, parameter[name[intm]]].flatten, parameter[]].nonzero, parameter[]]][constant[0]]
variable[partitions] assign[=] call[name[math].ceil, parameter[binary_operation[call[name[len], parameter[name[idx]]] / call[name[float], parameter[name[chunk_size]]]]]]
variable[task_store] assign[=] call[name[store_class], parameter[name[partitions], call[name[idx].tolist, parameter[]], name[save_file]]]
name[task_store].num_tasks assign[=] call[name[len], parameter[name[idx]]]
for taget[name[f]] in starred[name[intm_results].files] begin[:]
call[name[task_store].__dict__][name[f]] assign[=] call[name[intm_results]][name[f]]
return[name[task_store]]
|
keyword[def] identifier[RestoreTaskStoreFactory] ( identifier[store_class] , identifier[chunk_size] , identifier[restore_file] , identifier[save_file] ):
literal[string]
identifier[intm_results] = identifier[np] . identifier[load] ( identifier[restore_file] )
identifier[intm] = identifier[intm_results] [ identifier[intm_results] . identifier[files] [ literal[int] ]]
identifier[idx] = identifier[np] . identifier[isnan] ( identifier[intm] ). identifier[flatten] (). identifier[nonzero] ()[ literal[int] ]
identifier[partitions] = identifier[math] . identifier[ceil] ( identifier[len] ( identifier[idx] )/ identifier[float] ( identifier[chunk_size] ))
identifier[task_store] = identifier[store_class] ( identifier[partitions] , identifier[idx] . identifier[tolist] (), identifier[save_file] )
identifier[task_store] . identifier[num_tasks] = identifier[len] ( identifier[idx] )
keyword[for] identifier[f] keyword[in] identifier[intm_results] . identifier[files] :
identifier[task_store] . identifier[__dict__] [ identifier[f] ]= identifier[intm_results] [ identifier[f] ]
keyword[return] identifier[task_store]
|
def RestoreTaskStoreFactory(store_class, chunk_size, restore_file, save_file):
"""
Restores a task store from file.
"""
intm_results = np.load(restore_file)
intm = intm_results[intm_results.files[0]]
idx = np.isnan(intm).flatten().nonzero()[0]
partitions = math.ceil(len(idx) / float(chunk_size))
task_store = store_class(partitions, idx.tolist(), save_file)
task_store.num_tasks = len(idx)
# Also set up matrices for saving results
for f in intm_results.files:
task_store.__dict__[f] = intm_results[f] # depends on [control=['for'], data=['f']]
return task_store
|
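
A small numeric illustration of how the factory sizes the restored store, assuming numpy: NaN entries in the first saved matrix mark unfinished tasks, and the partition count is the ceiling of their number over chunk_size.

import math
import numpy as np

intm = np.array([1.0, np.nan, 2.0, np.nan, np.nan])  # hypothetical intermediate results
idx = np.isnan(intm).flatten().nonzero()[0]
print(idx.tolist())                    # [1, 3, 4] -> three tasks remain
print(math.ceil(len(idx) / float(2)))  # chunk_size=2 -> 2 partitions
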
def create_dev_cert(name, dest):
"""
A command to generate a self signed certificate for dev purposes.
    :param name: name to be given to the cert and key
:param dest: location on local filesystem to store the generated files
:return:
"""
if not os.path.exists(dest):
os.makedirs(dest)
keygen.gen_ca_cert(name, dest, 3650)
|
def function[create_dev_cert, parameter[name, dest]]:
constant[
A command to generate a self signed certificate for dev purposes.
    :param name: name to be given to the cert and key
:param dest: location on local filesystem to store the generated files
:return:
]
if <ast.UnaryOp object at 0x7da1b09d2260> begin[:]
call[name[os].makedirs, parameter[name[dest]]]
call[name[keygen].gen_ca_cert, parameter[name[name], name[dest], constant[3650]]]
|
keyword[def] identifier[create_dev_cert] ( identifier[name] , identifier[dest] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dest] ):
identifier[os] . identifier[makedirs] ( identifier[dest] )
identifier[keygen] . identifier[gen_ca_cert] ( identifier[name] , identifier[dest] , literal[int] )
|
def create_dev_cert(name, dest):
"""
A command to generate a self signed certificate for dev purposes.
    :param name: name to be given to the cert and key
:param dest: location on local filesystem to store the generated files
:return:
"""
if not os.path.exists(dest):
os.makedirs(dest) # depends on [control=['if'], data=[]]
keygen.gen_ca_cert(name, dest, 3650)
|
def analyze_script(self, index=None):
"""Analyze current script with todos"""
if self.is_analysis_done:
return
if index is None:
index = self.get_stack_index()
if self.data:
finfo = self.data[index]
if self.todolist_enabled:
finfo.run_todo_finder()
self.is_analysis_done = True
|
def function[analyze_script, parameter[self, index]]:
constant[Analyze current script with todos]
if name[self].is_analysis_done begin[:]
return[None]
if compare[name[index] is constant[None]] begin[:]
variable[index] assign[=] call[name[self].get_stack_index, parameter[]]
if name[self].data begin[:]
variable[finfo] assign[=] call[name[self].data][name[index]]
if name[self].todolist_enabled begin[:]
call[name[finfo].run_todo_finder, parameter[]]
name[self].is_analysis_done assign[=] constant[True]
|
keyword[def] identifier[analyze_script] ( identifier[self] , identifier[index] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[is_analysis_done] :
keyword[return]
keyword[if] identifier[index] keyword[is] keyword[None] :
identifier[index] = identifier[self] . identifier[get_stack_index] ()
keyword[if] identifier[self] . identifier[data] :
identifier[finfo] = identifier[self] . identifier[data] [ identifier[index] ]
keyword[if] identifier[self] . identifier[todolist_enabled] :
identifier[finfo] . identifier[run_todo_finder] ()
identifier[self] . identifier[is_analysis_done] = keyword[True]
|
def analyze_script(self, index=None):
"""Analyze current script with todos"""
if self.is_analysis_done:
return # depends on [control=['if'], data=[]]
if index is None:
index = self.get_stack_index() # depends on [control=['if'], data=['index']]
if self.data:
finfo = self.data[index]
if self.todolist_enabled:
finfo.run_todo_finder() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self.is_analysis_done = True
|
def add_new_observations(self, y, exogenous=None, **kwargs):
"""Update the endog/exog samples after a model fit.
After fitting your model and creating forecasts, you're going
to need to attach new samples to the data you fit on. These are
used to compute new forecasts (but using the same estimated
parameters).
Parameters
----------
y : array-like or iterable, shape=(n_samples,)
The time-series data to add to the endogenous samples on which the
``ARIMA`` estimator was previously fit. This may either be a Pandas
``Series`` object or a numpy array. This should be a one-
dimensional array of finite floats.
exogenous : array-like, shape=[n_obs, n_vars], optional (default=None)
An optional 2-d array of exogenous variables. If the model was
fit with an exogenous array of covariates, it will be required for
updating the observed values.
**kwargs : keyword args
Any keyword args that should be passed as ``**fit_kwargs`` in the
new model fit.
"""
return self.update(y, exogenous, **kwargs)
|
def function[add_new_observations, parameter[self, y, exogenous]]:
constant[Update the endog/exog samples after a model fit.
After fitting your model and creating forecasts, you're going
to need to attach new samples to the data you fit on. These are
used to compute new forecasts (but using the same estimated
parameters).
Parameters
----------
y : array-like or iterable, shape=(n_samples,)
The time-series data to add to the endogenous samples on which the
``ARIMA`` estimator was previously fit. This may either be a Pandas
``Series`` object or a numpy array. This should be a one-
dimensional array of finite floats.
exogenous : array-like, shape=[n_obs, n_vars], optional (default=None)
An optional 2-d array of exogenous variables. If the model was
fit with an exogenous array of covariates, it will be required for
updating the observed values.
**kwargs : keyword args
Any keyword args that should be passed as ``**fit_kwargs`` in the
new model fit.
]
return[call[name[self].update, parameter[name[y], name[exogenous]]]]
|
keyword[def] identifier[add_new_observations] ( identifier[self] , identifier[y] , identifier[exogenous] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[update] ( identifier[y] , identifier[exogenous] ,** identifier[kwargs] )
|
def add_new_observations(self, y, exogenous=None, **kwargs):
"""Update the endog/exog samples after a model fit.
After fitting your model and creating forecasts, you're going
to need to attach new samples to the data you fit on. These are
used to compute new forecasts (but using the same estimated
parameters).
Parameters
----------
y : array-like or iterable, shape=(n_samples,)
The time-series data to add to the endogenous samples on which the
``ARIMA`` estimator was previously fit. This may either be a Pandas
``Series`` object or a numpy array. This should be a one-
dimensional array of finite floats.
exogenous : array-like, shape=[n_obs, n_vars], optional (default=None)
An optional 2-d array of exogenous variables. If the model was
fit with an exogenous array of covariates, it will be required for
updating the observed values.
**kwargs : keyword args
Any keyword args that should be passed as ``**fit_kwargs`` in the
new model fit.
"""
return self.update(y, exogenous, **kwargs)
|
def add_minrmsd_to_ref(self, ref, ref_frame=0, atom_indices=None, precentered=False):
r"""
Adds the minimum root-mean-square-deviation (minrmsd) with respect to a reference structure to the feature list.
Parameters
----------
ref:
Reference structure for computing the minrmsd. Can be of two types:
1. :py:obj:`mdtraj.Trajectory` object
2. filename for mdtraj to load. In this case, only the :py:obj:`ref_frame` of that file will be used.
ref_frame: integer, default=0
Reference frame of the filename specified in :py:obj:`ref`.
This parameter has no effect if :py:obj:`ref` is not a filename.
atom_indices: array_like, default=None
Atoms that will be used for:
1. aligning the target and reference geometries.
2. computing rmsd after the alignment.
If left to None, all atoms of :py:obj:`ref` will be used.
precentered: bool, default=False
Use this boolean at your own risk to let mdtraj know that the target conformations are already
centered at the origin, i.e., their (uniformly weighted) center of mass lies at the origin.
This will speed up the computation of the rmsd.
"""
from .misc import MinRmsdFeature
f = MinRmsdFeature(ref, ref_frame=ref_frame, atom_indices=atom_indices, topology=self.topology,
precentered=precentered)
self.__add_feature(f)
|
def function[add_minrmsd_to_ref, parameter[self, ref, ref_frame, atom_indices, precentered]]:
constant[
Adds the minimum root-mean-square-deviation (minrmsd) with respect to a reference structure to the feature list.
Parameters
----------
ref:
Reference structure for computing the minrmsd. Can be of two types:
1. :py:obj:`mdtraj.Trajectory` object
2. filename for mdtraj to load. In this case, only the :py:obj:`ref_frame` of that file will be used.
ref_frame: integer, default=0
Reference frame of the filename specified in :py:obj:`ref`.
This parameter has no effect if :py:obj:`ref` is not a filename.
atom_indices: array_like, default=None
Atoms that will be used for:
1. aligning the target and reference geometries.
2. computing rmsd after the alignment.
If left to None, all atoms of :py:obj:`ref` will be used.
precentered: bool, default=False
Use this boolean at your own risk to let mdtraj know that the target conformations are already
centered at the origin, i.e., their (uniformly weighted) center of mass lies at the origin.
This will speed up the computation of the rmsd.
]
from relative_module[misc] import module[MinRmsdFeature]
variable[f] assign[=] call[name[MinRmsdFeature], parameter[name[ref]]]
call[name[self].__add_feature, parameter[name[f]]]
|
keyword[def] identifier[add_minrmsd_to_ref] ( identifier[self] , identifier[ref] , identifier[ref_frame] = literal[int] , identifier[atom_indices] = keyword[None] , identifier[precentered] = keyword[False] ):
literal[string]
keyword[from] . identifier[misc] keyword[import] identifier[MinRmsdFeature]
identifier[f] = identifier[MinRmsdFeature] ( identifier[ref] , identifier[ref_frame] = identifier[ref_frame] , identifier[atom_indices] = identifier[atom_indices] , identifier[topology] = identifier[self] . identifier[topology] ,
identifier[precentered] = identifier[precentered] )
identifier[self] . identifier[__add_feature] ( identifier[f] )
|
def add_minrmsd_to_ref(self, ref, ref_frame=0, atom_indices=None, precentered=False):
"""
Adds the minimum root-mean-square-deviation (minrmsd) with respect to a reference structure to the feature list.
Parameters
----------
ref:
Reference structure for computing the minrmsd. Can be of two types:
1. :py:obj:`mdtraj.Trajectory` object
2. filename for mdtraj to load. In this case, only the :py:obj:`ref_frame` of that file will be used.
ref_frame: integer, default=0
Reference frame of the filename specified in :py:obj:`ref`.
This parameter has no effect if :py:obj:`ref` is not a filename.
atom_indices: array_like, default=None
Atoms that will be used for:
1. aligning the target and reference geometries.
2. computing rmsd after the alignment.
If left to None, all atoms of :py:obj:`ref` will be used.
precentered: bool, default=False
Use this boolean at your own risk to let mdtraj know that the target conformations are already
centered at the origin, i.e., their (uniformly weighted) center of mass lies at the origin.
This will speed up the computation of the rmsd.
"""
from .misc import MinRmsdFeature
f = MinRmsdFeature(ref, ref_frame=ref_frame, atom_indices=atom_indices, topology=self.topology, precentered=precentered)
self.__add_feature(f)
|
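
A usage sketch in the featurizer style this method belongs to; the import assumes it is PyEMMA's MDFeaturizer, and the file names are placeholders, so treat the exact module path as an assumption.

import pyemma.coordinates as coor

feat = coor.featurizer('topology.pdb')                 # hypothetical topology file
feat.add_minrmsd_to_ref('reference.pdb', ref_frame=0)
data = coor.load('trajectory.xtc', features=feat)      # one minrmsd value per frame
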
def chain(*parsers):
    """Runs a series of parsers in sequence passing the result of each parser to the next.
    The result of the last parser is returned.
    """
    def chain_block(*args, **kwargs):
        # Apply the outer parser list; the inner *args must not shadow it,
        # or the parsers passed to chain() would never be used.
        v = parsers[0](*args, **kwargs)
        for p in parsers[1:]:
            v = p(v)
        return v
    return chain_block
|
def function[chain, parameter[]]:
constant[Runs a series of parsers in sequence passing the result of each parser to the next.
The result of the last parser is returned.
]
def function[chain_block, parameter[]]:
        variable[v] assign[=] call[call[name[parsers]][constant[0]], parameter[<ast.Starred object at 0x7da1afe0f6a0>]]
        for taget[name[p]] in starred[call[name[parsers]][<ast.Slice object at 0x7da1afe0d030>]] begin[:]
variable[v] assign[=] call[name[p], parameter[name[v]]]
return[name[v]]
return[name[chain_block]]
|
keyword[def] identifier[chain] (* identifier[parsers] ):
    literal[string]
    keyword[def] identifier[chain_block] (* identifier[args] ,** identifier[kwargs] ):
        identifier[v] = identifier[parsers] [ literal[int] ](* identifier[args] ,** identifier[kwargs] )
        keyword[for] identifier[p] keyword[in] identifier[parsers] [ literal[int] :]:
identifier[v] = identifier[p] ( identifier[v] )
keyword[return] identifier[v]
keyword[return] identifier[chain_block]
|
def chain(*parsers):
    """Runs a series of parsers in sequence passing the result of each parser to the next.
    The result of the last parser is returned.
    """
    def chain_block(*args, **kwargs):
        # the outer parameter is renamed so the inner *args no longer shadows it
        v = parsers[0](*args, **kwargs)
        for p in parsers[1:]:
            v = p(v) # depends on [control=['for'], data=['p']]
        return v
    return chain_block
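With the shadowing fixed above, a quick sanity check of the combinator; the three callables are ordinary functions chosen for illustration:

double = lambda n: n * 2
parse = chain(str.strip, int, double)
assert parse(' 21 ') == 42  # str.strip -> int -> double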
|
def set_file_chksum(self, doc, chksum):
"""
Raises OrderError if no package or file defined.
Raises CardinalityError if more than one chksum set.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_chksum_set:
self.file_chksum_set = True
self.file(doc).chk_sum = checksum_from_sha1(chksum)
return True
else:
raise CardinalityError('File::CheckSum')
else:
raise OrderError('File::CheckSum')
|
def function[set_file_chksum, parameter[self, doc, chksum]]:
constant[
Raises OrderError if no package or file defined.
Raises CardinalityError if more than one chksum set.
]
if <ast.BoolOp object at 0x7da1b016ae90> begin[:]
if <ast.UnaryOp object at 0x7da1b016b940> begin[:]
name[self].file_chksum_set assign[=] constant[True]
call[name[self].file, parameter[name[doc]]].chk_sum assign[=] call[name[checksum_from_sha1], parameter[name[chksum]]]
return[constant[True]]
|
keyword[def] identifier[set_file_chksum] ( identifier[self] , identifier[doc] , identifier[chksum] ):
literal[string]
keyword[if] identifier[self] . identifier[has_package] ( identifier[doc] ) keyword[and] identifier[self] . identifier[has_file] ( identifier[doc] ):
keyword[if] keyword[not] identifier[self] . identifier[file_chksum_set] :
identifier[self] . identifier[file_chksum_set] = keyword[True]
identifier[self] . identifier[file] ( identifier[doc] ). identifier[chk_sum] = identifier[checksum_from_sha1] ( identifier[chksum] )
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[CardinalityError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[OrderError] ( literal[string] )
|
def set_file_chksum(self, doc, chksum):
"""
Raises OrderError if no package or file defined.
Raises CardinalityError if more than one chksum set.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_chksum_set:
self.file_chksum_set = True
self.file(doc).chk_sum = checksum_from_sha1(chksum)
return True # depends on [control=['if'], data=[]]
else:
raise CardinalityError('File::CheckSum') # depends on [control=['if'], data=[]]
else:
raise OrderError('File::CheckSum')
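A hedged sketch of the cardinality guard in use; `builder`, `doc`, and the SHA-1 digests are hypothetical, and CardinalityError is assumed importable from the same spdx builder module:

builder.set_file_chksum(doc, '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12')
try:
    builder.set_file_chksum(doc, 'da39a3ee5e6b4b0d3255bfef95601890afd80709')
except CardinalityError:
    pass  # a second checksum for the same file is rejected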
|
def get_bank_hierarchy_design_session(self, proxy):
"""Gets the session designing bank hierarchies.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.BankHierarchyDesignSession) - a
            ``BankHierarchyDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_bank_hierarchy_design() is
false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy_design()`` is true.*
"""
if not self.supports_bank_hierarchy_design():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.BankHierarchyDesignSession(proxy=proxy, runtime=self._runtime)
|
def function[get_bank_hierarchy_design_session, parameter[self, proxy]]:
constant[Gets the session designing bank hierarchies.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.BankHierarchyDesignSession) - a
            ``BankHierarchyDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_bank_hierarchy_design() is
false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy_design()`` is true.*
]
if <ast.UnaryOp object at 0x7da207f008b0> begin[:]
<ast.Raise object at 0x7da20c796a40>
return[call[name[sessions].BankHierarchyDesignSession, parameter[]]]
|
keyword[def] identifier[get_bank_hierarchy_design_session] ( identifier[self] , identifier[proxy] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_bank_hierarchy_design] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[BankHierarchyDesignSession] ( identifier[proxy] = identifier[proxy] , identifier[runtime] = identifier[self] . identifier[_runtime] )
|
def get_bank_hierarchy_design_session(self, proxy):
"""Gets the session designing bank hierarchies.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.BankHierarchyDesignSession) - a
            ``BankHierarchyDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_bank_hierarchy_design() is
false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy_design()`` is true.*
"""
if not self.supports_bank_hierarchy_design():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.BankHierarchyDesignSession(proxy=proxy, runtime=self._runtime)
|
def prepare_data(data_dir, fileroot, block_pct_tokens_thresh=0.1):
"""
Prepare data for a single HTML + gold standard blocks example, uniquely
identified by ``fileroot``.
Args:
data_dir (str)
fileroot (str)
block_pct_tokens_thresh (float): must be in [0.0, 1.0]
Returns:
Tuple[str, Tuple[np.array[int], np.array[int], List[str]], Tuple[np.array[int], np.array[int], List[str]]]:
The first element is simply the raw html as a string. The second and
third elements are 3-tuples for content and comments, respectively,
where the first element is a numpy array of 1s and 0s whose values
        indicate whether a given block is considered non-content;
        the second element is a numpy integer array whose values are
the total number of tokens in each block; and the third element is
a flat list of content or comment tokens as strings, concatenated
from all blocks.
See Also:
:func:`prepare_all_data`
"""
if not 0.0 <= block_pct_tokens_thresh <= 1.0:
raise ValueError('block_pct_tokens_thresh must be in the range [0.0, 1.0]')
html = read_html_file(data_dir, fileroot)
blocks = read_gold_standard_blocks_file(data_dir, fileroot, split_blocks=True)
content_blocks = []
comments_blocks = []
for block in blocks:
block_split = block.split('\t')
num_block_tokens = len(block_split[2].split())
# total number of tokens in block is used as weights
content_blocks.append(
(float(block_split[0]), num_block_tokens, block_split[3].split()))
comments_blocks.append(
(float(block_split[1]), num_block_tokens, block_split[4].split()))
parsed_content_blocks = _parse_content_or_comments_blocks(
content_blocks, block_pct_tokens_thresh)
parsed_comments_blocks = _parse_content_or_comments_blocks(
comments_blocks, block_pct_tokens_thresh)
return (html, parsed_content_blocks, parsed_comments_blocks)
|
def function[prepare_data, parameter[data_dir, fileroot, block_pct_tokens_thresh]]:
constant[
Prepare data for a single HTML + gold standard blocks example, uniquely
identified by ``fileroot``.
Args:
data_dir (str)
fileroot (str)
block_pct_tokens_thresh (float): must be in [0.0, 1.0]
Returns:
Tuple[str, Tuple[np.array[int], np.array[int], List[str]], Tuple[np.array[int], np.array[int], List[str]]]:
The first element is simply the raw html as a string. The second and
third elements are 3-tuples for content and comments, respectively,
where the first element is a numpy array of 1s and 0s whose values
        indicate whether a given block is considered non-content;
        the second element is a numpy integer array whose values are
the total number of tokens in each block; and the third element is
a flat list of content or comment tokens as strings, concatenated
from all blocks.
See Also:
:func:`prepare_all_data`
]
if <ast.UnaryOp object at 0x7da1b1e303d0> begin[:]
<ast.Raise object at 0x7da1b1e30370>
variable[html] assign[=] call[name[read_html_file], parameter[name[data_dir], name[fileroot]]]
variable[blocks] assign[=] call[name[read_gold_standard_blocks_file], parameter[name[data_dir], name[fileroot]]]
variable[content_blocks] assign[=] list[[]]
variable[comments_blocks] assign[=] list[[]]
for taget[name[block]] in starred[name[blocks]] begin[:]
variable[block_split] assign[=] call[name[block].split, parameter[constant[ ]]]
variable[num_block_tokens] assign[=] call[name[len], parameter[call[call[name[block_split]][constant[2]].split, parameter[]]]]
call[name[content_blocks].append, parameter[tuple[[<ast.Call object at 0x7da1b1e30a30>, <ast.Name object at 0x7da1b1e333d0>, <ast.Call object at 0x7da1b1e32f20>]]]]
call[name[comments_blocks].append, parameter[tuple[[<ast.Call object at 0x7da1b1e95870>, <ast.Name object at 0x7da1b1e94ca0>, <ast.Call object at 0x7da1b1e94580>]]]]
variable[parsed_content_blocks] assign[=] call[name[_parse_content_or_comments_blocks], parameter[name[content_blocks], name[block_pct_tokens_thresh]]]
variable[parsed_comments_blocks] assign[=] call[name[_parse_content_or_comments_blocks], parameter[name[comments_blocks], name[block_pct_tokens_thresh]]]
return[tuple[[<ast.Name object at 0x7da1b1e969b0>, <ast.Name object at 0x7da1b1e94520>, <ast.Name object at 0x7da1b1e97e50>]]]
|
keyword[def] identifier[prepare_data] ( identifier[data_dir] , identifier[fileroot] , identifier[block_pct_tokens_thresh] = literal[int] ):
literal[string]
keyword[if] keyword[not] literal[int] <= identifier[block_pct_tokens_thresh] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[html] = identifier[read_html_file] ( identifier[data_dir] , identifier[fileroot] )
identifier[blocks] = identifier[read_gold_standard_blocks_file] ( identifier[data_dir] , identifier[fileroot] , identifier[split_blocks] = keyword[True] )
identifier[content_blocks] =[]
identifier[comments_blocks] =[]
keyword[for] identifier[block] keyword[in] identifier[blocks] :
identifier[block_split] = identifier[block] . identifier[split] ( literal[string] )
identifier[num_block_tokens] = identifier[len] ( identifier[block_split] [ literal[int] ]. identifier[split] ())
identifier[content_blocks] . identifier[append] (
( identifier[float] ( identifier[block_split] [ literal[int] ]), identifier[num_block_tokens] , identifier[block_split] [ literal[int] ]. identifier[split] ()))
identifier[comments_blocks] . identifier[append] (
( identifier[float] ( identifier[block_split] [ literal[int] ]), identifier[num_block_tokens] , identifier[block_split] [ literal[int] ]. identifier[split] ()))
identifier[parsed_content_blocks] = identifier[_parse_content_or_comments_blocks] (
identifier[content_blocks] , identifier[block_pct_tokens_thresh] )
identifier[parsed_comments_blocks] = identifier[_parse_content_or_comments_blocks] (
identifier[comments_blocks] , identifier[block_pct_tokens_thresh] )
keyword[return] ( identifier[html] , identifier[parsed_content_blocks] , identifier[parsed_comments_blocks] )
|
def prepare_data(data_dir, fileroot, block_pct_tokens_thresh=0.1):
"""
Prepare data for a single HTML + gold standard blocks example, uniquely
identified by ``fileroot``.
Args:
data_dir (str)
fileroot (str)
block_pct_tokens_thresh (float): must be in [0.0, 1.0]
Returns:
Tuple[str, Tuple[np.array[int], np.array[int], List[str]], Tuple[np.array[int], np.array[int], List[str]]]:
The first element is simply the raw html as a string. The second and
third elements are 3-tuples for content and comments, respectively,
where the first element is a numpy array of 1s and 0s whose values
        indicate whether a given block is considered non-content;
        the second element is a numpy integer array whose values are
the total number of tokens in each block; and the third element is
a flat list of content or comment tokens as strings, concatenated
from all blocks.
See Also:
:func:`prepare_all_data`
"""
if not 0.0 <= block_pct_tokens_thresh <= 1.0:
raise ValueError('block_pct_tokens_thresh must be in the range [0.0, 1.0]') # depends on [control=['if'], data=[]]
html = read_html_file(data_dir, fileroot)
blocks = read_gold_standard_blocks_file(data_dir, fileroot, split_blocks=True)
content_blocks = []
comments_blocks = []
for block in blocks:
block_split = block.split('\t')
num_block_tokens = len(block_split[2].split())
# total number of tokens in block is used as weights
content_blocks.append((float(block_split[0]), num_block_tokens, block_split[3].split()))
comments_blocks.append((float(block_split[1]), num_block_tokens, block_split[4].split())) # depends on [control=['for'], data=['block']]
parsed_content_blocks = _parse_content_or_comments_blocks(content_blocks, block_pct_tokens_thresh)
parsed_comments_blocks = _parse_content_or_comments_blocks(comments_blocks, block_pct_tokens_thresh)
return (html, parsed_content_blocks, parsed_comments_blocks)
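A hedged usage sketch; the directory and fileroot are placeholders, and the read_* helpers are assumed importable from the same module as prepare_data:

html, content, comments = prepare_data('data/', 'page-001',
                                       block_pct_tokens_thresh=0.1)
labels, token_counts, tokens = content     # per-block labels, weights, flat token list
print(labels.shape, token_counts.sum(), tokens[:5])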
|
def _fetch_items(self, store_load, iterable, args, kwargs):
""" Method used by f_store/load/remove_items to find corresponding items in the tree.
:param store_load:
String constant specifying if we want to store, load or remove.
The corresponding constants are defined at the top of this module.
:param iterable:
Iterable over items to look for in the tree. Can be strings specifying names,
can be the item instances themselves or already correctly formatted tuples.
:param args: Additional arguments passed to the storage service
:param kwargs:
Additional keyword arguments passed to the storage service.
Two optional keyword arguments are popped and used by this method.
only_empties:
Can be in kwargs if only empty parameters and results should be considered.
non_empties:
Can be in kwargs if only non-empty parameters and results should be considered.
:return:
A list containing formatted tuples.
These tuples can be handled by the storage service, they have
the following format: (msg, item_to_store_load_or_remove, args, kwargs)
"""
only_empties = kwargs.pop('only_empties', False)
non_empties = kwargs.pop('non_empties', False)
item_list = []
        # Iterate through the iterable and apply the appropriate fetching method via trial and error.
for iter_item in iterable:
try:
item_tuple = self._fetch_from_string(store_load, iter_item, args, kwargs)
except TypeError:
try:
item_tuple = self._fetch_from_node(store_load, iter_item, args, kwargs)
except AttributeError:
item_tuple = self._fetch_from_tuple(store_load, iter_item, args, kwargs)
item = item_tuple[1]
msg = item_tuple[0]
if item.v_is_leaf:
if only_empties and not item.f_is_empty():
continue
if non_empties and item.f_is_empty():
continue
# # Explored Parameters cannot be deleted, this would break the underlying hdf5 file
# # structure
# if (msg == pypetconstants.DELETE and
# item.v_full_name in self._root_instance._explored_parameters and
# len(self._root_instance._explored_parameters) == 1):
            # raise TypeError('You cannot delete the last explored parameter of a trajectory stored '
# 'into an hdf5 file.')
item_list.append(item_tuple)
return item_list
|
def function[_fetch_items, parameter[self, store_load, iterable, args, kwargs]]:
constant[ Method used by f_store/load/remove_items to find corresponding items in the tree.
:param store_load:
String constant specifying if we want to store, load or remove.
The corresponding constants are defined at the top of this module.
:param iterable:
Iterable over items to look for in the tree. Can be strings specifying names,
can be the item instances themselves or already correctly formatted tuples.
:param args: Additional arguments passed to the storage service
:param kwargs:
Additional keyword arguments passed to the storage service.
Two optional keyword arguments are popped and used by this method.
only_empties:
Can be in kwargs if only empty parameters and results should be considered.
non_empties:
Can be in kwargs if only non-empty parameters and results should be considered.
:return:
A list containing formatted tuples.
These tuples can be handled by the storage service, they have
the following format: (msg, item_to_store_load_or_remove, args, kwargs)
]
variable[only_empties] assign[=] call[name[kwargs].pop, parameter[constant[only_empties], constant[False]]]
variable[non_empties] assign[=] call[name[kwargs].pop, parameter[constant[non_empties], constant[False]]]
variable[item_list] assign[=] list[[]]
for taget[name[iter_item]] in starred[name[iterable]] begin[:]
<ast.Try object at 0x7da1b01e0310>
variable[item] assign[=] call[name[item_tuple]][constant[1]]
variable[msg] assign[=] call[name[item_tuple]][constant[0]]
if name[item].v_is_leaf begin[:]
if <ast.BoolOp object at 0x7da1b01e25f0> begin[:]
continue
if <ast.BoolOp object at 0x7da1b01e2410> begin[:]
continue
call[name[item_list].append, parameter[name[item_tuple]]]
return[name[item_list]]
|
keyword[def] identifier[_fetch_items] ( identifier[self] , identifier[store_load] , identifier[iterable] , identifier[args] , identifier[kwargs] ):
literal[string]
identifier[only_empties] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[non_empties] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[item_list] =[]
keyword[for] identifier[iter_item] keyword[in] identifier[iterable] :
keyword[try] :
identifier[item_tuple] = identifier[self] . identifier[_fetch_from_string] ( identifier[store_load] , identifier[iter_item] , identifier[args] , identifier[kwargs] )
keyword[except] identifier[TypeError] :
keyword[try] :
identifier[item_tuple] = identifier[self] . identifier[_fetch_from_node] ( identifier[store_load] , identifier[iter_item] , identifier[args] , identifier[kwargs] )
keyword[except] identifier[AttributeError] :
identifier[item_tuple] = identifier[self] . identifier[_fetch_from_tuple] ( identifier[store_load] , identifier[iter_item] , identifier[args] , identifier[kwargs] )
identifier[item] = identifier[item_tuple] [ literal[int] ]
identifier[msg] = identifier[item_tuple] [ literal[int] ]
keyword[if] identifier[item] . identifier[v_is_leaf] :
keyword[if] identifier[only_empties] keyword[and] keyword[not] identifier[item] . identifier[f_is_empty] ():
keyword[continue]
keyword[if] identifier[non_empties] keyword[and] identifier[item] . identifier[f_is_empty] ():
keyword[continue]
identifier[item_list] . identifier[append] ( identifier[item_tuple] )
keyword[return] identifier[item_list]
|
def _fetch_items(self, store_load, iterable, args, kwargs):
""" Method used by f_store/load/remove_items to find corresponding items in the tree.
:param store_load:
String constant specifying if we want to store, load or remove.
The corresponding constants are defined at the top of this module.
:param iterable:
Iterable over items to look for in the tree. Can be strings specifying names,
can be the item instances themselves or already correctly formatted tuples.
:param args: Additional arguments passed to the storage service
:param kwargs:
Additional keyword arguments passed to the storage service.
Two optional keyword arguments are popped and used by this method.
only_empties:
Can be in kwargs if only empty parameters and results should be considered.
non_empties:
Can be in kwargs if only non-empty parameters and results should be considered.
:return:
A list containing formatted tuples.
These tuples can be handled by the storage service, they have
the following format: (msg, item_to_store_load_or_remove, args, kwargs)
"""
only_empties = kwargs.pop('only_empties', False)
non_empties = kwargs.pop('non_empties', False)
item_list = []
        # Iterate through the iterable and apply the appropriate fetching method via trial and error.
for iter_item in iterable:
try:
item_tuple = self._fetch_from_string(store_load, iter_item, args, kwargs) # depends on [control=['try'], data=[]]
except TypeError:
try:
item_tuple = self._fetch_from_node(store_load, iter_item, args, kwargs) # depends on [control=['try'], data=[]]
except AttributeError:
item_tuple = self._fetch_from_tuple(store_load, iter_item, args, kwargs) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
item = item_tuple[1]
msg = item_tuple[0]
if item.v_is_leaf:
if only_empties and (not item.f_is_empty()):
continue # depends on [control=['if'], data=[]]
if non_empties and item.f_is_empty():
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# # Explored Parameters cannot be deleted, this would break the underlying hdf5 file
# # structure
# if (msg == pypetconstants.DELETE and
# item.v_full_name in self._root_instance._explored_parameters and
# len(self._root_instance._explored_parameters) == 1):
            # raise TypeError('You cannot delete the last explored parameter of a trajectory stored '
# 'into an hdf5 file.')
item_list.append(item_tuple) # depends on [control=['for'], data=['iter_item']]
return item_list
|
def matches(self, other):
'''
A disjunctive list matches a phoneme if any of its members matches the phoneme.
If other is also a disjunctive list, any match between this list and the other returns true.
'''
if other is None:
return False
if isinstance(other, PhonemeDisjunction):
return any([phoneme.matches(other) for phoneme in self])
if isinstance(other, list) or isinstance(other, PhonologicalFeature):
other = phoneme(other)
return any([phoneme <= other for phoneme in self])
|
def function[matches, parameter[self, other]]:
constant[
A disjunctive list matches a phoneme if any of its members matches the phoneme.
If other is also a disjunctive list, any match between this list and the other returns true.
]
if compare[name[other] is constant[None]] begin[:]
return[constant[False]]
if call[name[isinstance], parameter[name[other], name[PhonemeDisjunction]]] begin[:]
return[call[name[any], parameter[<ast.ListComp object at 0x7da1b26af280>]]]
if <ast.BoolOp object at 0x7da1b26ac700> begin[:]
variable[other] assign[=] call[name[phoneme], parameter[name[other]]]
return[call[name[any], parameter[<ast.ListComp object at 0x7da1b26adb70>]]]
|
keyword[def] identifier[matches] ( identifier[self] , identifier[other] ):
literal[string]
keyword[if] identifier[other] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[isinstance] ( identifier[other] , identifier[PhonemeDisjunction] ):
keyword[return] identifier[any] ([ identifier[phoneme] . identifier[matches] ( identifier[other] ) keyword[for] identifier[phoneme] keyword[in] identifier[self] ])
keyword[if] identifier[isinstance] ( identifier[other] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[other] , identifier[PhonologicalFeature] ):
identifier[other] = identifier[phoneme] ( identifier[other] )
keyword[return] identifier[any] ([ identifier[phoneme] <= identifier[other] keyword[for] identifier[phoneme] keyword[in] identifier[self] ])
|
def matches(self, other):
"""
A disjunctive list matches a phoneme if any of its members matches the phoneme.
If other is also a disjunctive list, any match between this list and the other returns true.
"""
if other is None:
return False # depends on [control=['if'], data=[]]
if isinstance(other, PhonemeDisjunction):
return any([phoneme.matches(other) for phoneme in self]) # depends on [control=['if'], data=[]]
if isinstance(other, list) or isinstance(other, PhonologicalFeature):
other = phoneme(other) # depends on [control=['if'], data=[]]
return any([phoneme <= other for phoneme in self])
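The final `phoneme <= other` test above is a subsumption check; the analogy below models phonemes as frozensets of feature labels to illustrate the semantics, and is not the library's actual representation:

voiced_stop = frozenset({'consonantal', 'voiced'})
stop = frozenset({'consonantal'})
disjunction = [voiced_stop, stop]
target = frozenset({'consonantal', 'voiced', 'nasal'})
print(any(p <= target for p in disjunction))  # True: voiced_stop's features all occur in target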
|
def prefix_shared_name_attributes(meta_graph, absolute_import_scope):
"""In-place prefixes shared_name attributes of nodes."""
shared_name_attr = "shared_name"
for node in meta_graph.graph_def.node:
shared_name_value = node.attr.get(shared_name_attr, None)
if shared_name_value and shared_name_value.HasField("s"):
if shared_name_value.s:
node.attr[shared_name_attr].s = tf.compat.as_bytes(
prepend_name_scope(
shared_name_value.s, import_scope=absolute_import_scope))
|
def function[prefix_shared_name_attributes, parameter[meta_graph, absolute_import_scope]]:
constant[In-place prefixes shared_name attributes of nodes.]
variable[shared_name_attr] assign[=] constant[shared_name]
for taget[name[node]] in starred[name[meta_graph].graph_def.node] begin[:]
variable[shared_name_value] assign[=] call[name[node].attr.get, parameter[name[shared_name_attr], constant[None]]]
if <ast.BoolOp object at 0x7da20c6aa290> begin[:]
if name[shared_name_value].s begin[:]
call[name[node].attr][name[shared_name_attr]].s assign[=] call[name[tf].compat.as_bytes, parameter[call[name[prepend_name_scope], parameter[name[shared_name_value].s]]]]
|
keyword[def] identifier[prefix_shared_name_attributes] ( identifier[meta_graph] , identifier[absolute_import_scope] ):
literal[string]
identifier[shared_name_attr] = literal[string]
keyword[for] identifier[node] keyword[in] identifier[meta_graph] . identifier[graph_def] . identifier[node] :
identifier[shared_name_value] = identifier[node] . identifier[attr] . identifier[get] ( identifier[shared_name_attr] , keyword[None] )
keyword[if] identifier[shared_name_value] keyword[and] identifier[shared_name_value] . identifier[HasField] ( literal[string] ):
keyword[if] identifier[shared_name_value] . identifier[s] :
identifier[node] . identifier[attr] [ identifier[shared_name_attr] ]. identifier[s] = identifier[tf] . identifier[compat] . identifier[as_bytes] (
identifier[prepend_name_scope] (
identifier[shared_name_value] . identifier[s] , identifier[import_scope] = identifier[absolute_import_scope] ))
|
def prefix_shared_name_attributes(meta_graph, absolute_import_scope):
"""In-place prefixes shared_name attributes of nodes."""
shared_name_attr = 'shared_name'
for node in meta_graph.graph_def.node:
shared_name_value = node.attr.get(shared_name_attr, None)
if shared_name_value and shared_name_value.HasField('s'):
if shared_name_value.s:
node.attr[shared_name_attr].s = tf.compat.as_bytes(prepend_name_scope(shared_name_value.s, import_scope=absolute_import_scope)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
|
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the QueryRequestPayload object to a stream.
Args:
output_buffer (Stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
Raises:
InvalidField: Raised if the query functions are not defined.
"""
local_buffer = utils.BytearrayStream()
if self._query_functions:
for query_function in self._query_functions:
query_function.write(local_buffer, kmip_version=kmip_version)
else:
raise exceptions.InvalidField(
"The Query request payload is missing the query functions "
"field."
)
self.length = local_buffer.length()
super(QueryRequestPayload, self).write(
output_buffer,
kmip_version=kmip_version
)
output_buffer.write(local_buffer.buffer)
|
def function[write, parameter[self, output_buffer, kmip_version]]:
constant[
Write the data encoding the QueryRequestPayload object to a stream.
Args:
output_buffer (Stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
Raises:
InvalidField: Raised if the query functions are not defined.
]
variable[local_buffer] assign[=] call[name[utils].BytearrayStream, parameter[]]
if name[self]._query_functions begin[:]
for taget[name[query_function]] in starred[name[self]._query_functions] begin[:]
call[name[query_function].write, parameter[name[local_buffer]]]
name[self].length assign[=] call[name[local_buffer].length, parameter[]]
call[call[name[super], parameter[name[QueryRequestPayload], name[self]]].write, parameter[name[output_buffer]]]
call[name[output_buffer].write, parameter[name[local_buffer].buffer]]
|
keyword[def] identifier[write] ( identifier[self] , identifier[output_buffer] , identifier[kmip_version] = identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_1_0] ):
literal[string]
identifier[local_buffer] = identifier[utils] . identifier[BytearrayStream] ()
keyword[if] identifier[self] . identifier[_query_functions] :
keyword[for] identifier[query_function] keyword[in] identifier[self] . identifier[_query_functions] :
identifier[query_function] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] )
keyword[else] :
keyword[raise] identifier[exceptions] . identifier[InvalidField] (
literal[string]
literal[string]
)
identifier[self] . identifier[length] = identifier[local_buffer] . identifier[length] ()
identifier[super] ( identifier[QueryRequestPayload] , identifier[self] ). identifier[write] (
identifier[output_buffer] ,
identifier[kmip_version] = identifier[kmip_version]
)
identifier[output_buffer] . identifier[write] ( identifier[local_buffer] . identifier[buffer] )
|
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the QueryRequestPayload object to a stream.
Args:
output_buffer (Stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
Raises:
InvalidField: Raised if the query functions are not defined.
"""
local_buffer = utils.BytearrayStream()
if self._query_functions:
for query_function in self._query_functions:
query_function.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['query_function']] # depends on [control=['if'], data=[]]
else:
raise exceptions.InvalidField('The Query request payload is missing the query functions field.')
self.length = local_buffer.length()
super(QueryRequestPayload, self).write(output_buffer, kmip_version=kmip_version)
output_buffer.write(local_buffer.buffer)
|
def get_is_group_member(self, grp_name, user):
"""
Check if the given user is a member of the named group.
Note that a group maintainer is not considered a member unless the
user is also explicitly added as a member.
Args:
            grp_name (string): Name of group.
            user (string): User of interest.
Returns:
(bool): False if user not a member.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.get_is_group_member(grp_name, user)
|
def function[get_is_group_member, parameter[self, grp_name, user]]:
constant[
Check if the given user is a member of the named group.
Note that a group maintainer is not considered a member unless the
user is also explicitly added as a member.
Args:
            grp_name (string): Name of group.
            user (string): User of interest.
Returns:
(bool): False if user not a member.
]
call[name[self].project_service.set_auth, parameter[name[self]._token_project]]
return[call[name[self].project_service.get_is_group_member, parameter[name[grp_name], name[user]]]]
|
keyword[def] identifier[get_is_group_member] ( identifier[self] , identifier[grp_name] , identifier[user] ):
literal[string]
identifier[self] . identifier[project_service] . identifier[set_auth] ( identifier[self] . identifier[_token_project] )
keyword[return] identifier[self] . identifier[project_service] . identifier[get_is_group_member] ( identifier[grp_name] , identifier[user] )
|
def get_is_group_member(self, grp_name, user):
"""
Check if the given user is a member of the named group.
Note that a group maintainer is not considered a member unless the
user is also explicitly added as a member.
Args:
            grp_name (string): Name of group.
            user (string): User of interest.
Returns:
(bool): False if user not a member.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.get_is_group_member(grp_name, user)
|
def bpp(s,B):
"""bpp: Martello and Toth's model to solve the bin packing problem.
Parameters:
- s: list with item widths
- B: bin capacity
Returns a model, ready to be solved.
"""
n = len(s)
U = len(FFD(s,B)) # upper bound of the number of bins
model = Model("bpp")
# setParam("MIPFocus",1)
x,y = {},{}
for i in range(n):
for j in range(U):
x[i,j] = model.addVar(vtype="B", name="x(%s,%s)"%(i,j))
for j in range(U):
y[j] = model.addVar(vtype="B", name="y(%s)"%j)
# assignment constraints
for i in range(n):
model.addCons(quicksum(x[i,j] for j in range(U)) == 1, "Assign(%s)"%i)
# bin capacity constraints
for j in range(U):
model.addCons(quicksum(s[i]*x[i,j] for i in range(n)) <= B*y[j], "Capac(%s)"%j)
# tighten assignment constraints
for j in range(U):
for i in range(n):
model.addCons(x[i,j] <= y[j], "Strong(%s,%s)"%(i,j))
# tie breaking constraints
for j in range(U-1):
model.addCons(y[j] >= y[j+1],"TieBrk(%s)"%j)
# SOS constraints
for i in range(n):
model.addConsSOS1([x[i,j] for j in range(U)])
model.setObjective(quicksum(y[j] for j in range(U)), "minimize")
model.data = x,y
return model
|
def function[bpp, parameter[s, B]]:
constant[bpp: Martello and Toth's model to solve the bin packing problem.
Parameters:
- s: list with item widths
- B: bin capacity
Returns a model, ready to be solved.
]
variable[n] assign[=] call[name[len], parameter[name[s]]]
variable[U] assign[=] call[name[len], parameter[call[name[FFD], parameter[name[s], name[B]]]]]
variable[model] assign[=] call[name[Model], parameter[constant[bpp]]]
<ast.Tuple object at 0x7da1b18e79d0> assign[=] tuple[[<ast.Dict object at 0x7da1b18e5780>, <ast.Dict object at 0x7da1b18e53f0>]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[U]]]] begin[:]
call[name[x]][tuple[[<ast.Name object at 0x7da1b18e56f0>, <ast.Name object at 0x7da1b18e70d0>]]] assign[=] call[name[model].addVar, parameter[]]
for taget[name[j]] in starred[call[name[range], parameter[name[U]]]] begin[:]
call[name[y]][name[j]] assign[=] call[name[model].addVar, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b17f6a40>]] equal[==] constant[1]], binary_operation[constant[Assign(%s)] <ast.Mod object at 0x7da2590d6920> name[i]]]]
for taget[name[j]] in starred[call[name[range], parameter[name[U]]]] begin[:]
call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b17f6170>]] less_or_equal[<=] binary_operation[name[B] * call[name[y]][name[j]]]], binary_operation[constant[Capac(%s)] <ast.Mod object at 0x7da2590d6920> name[j]]]]
for taget[name[j]] in starred[call[name[range], parameter[name[U]]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
call[name[model].addCons, parameter[compare[call[name[x]][tuple[[<ast.Name object at 0x7da1b17f7700>, <ast.Name object at 0x7da1b17f6bc0>]]] less_or_equal[<=] call[name[y]][name[j]]], binary_operation[constant[Strong(%s,%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b17f6ad0>, <ast.Name object at 0x7da1b17f7940>]]]]]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[U] - constant[1]]]]] begin[:]
call[name[model].addCons, parameter[compare[call[name[y]][name[j]] greater_or_equal[>=] call[name[y]][binary_operation[name[j] + constant[1]]]], binary_operation[constant[TieBrk(%s)] <ast.Mod object at 0x7da2590d6920> name[j]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
call[name[model].addConsSOS1, parameter[<ast.ListComp object at 0x7da1b17f6140>]]
call[name[model].setObjective, parameter[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b17f6710>]], constant[minimize]]]
name[model].data assign[=] tuple[[<ast.Name object at 0x7da1b17f7670>, <ast.Name object at 0x7da1b17f5d20>]]
return[name[model]]
|
keyword[def] identifier[bpp] ( identifier[s] , identifier[B] ):
literal[string]
identifier[n] = identifier[len] ( identifier[s] )
identifier[U] = identifier[len] ( identifier[FFD] ( identifier[s] , identifier[B] ))
identifier[model] = identifier[Model] ( literal[string] )
identifier[x] , identifier[y] ={},{}
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] ):
identifier[x] [ identifier[i] , identifier[j] ]= identifier[model] . identifier[addVar] ( identifier[vtype] = literal[string] , identifier[name] = literal[string] %( identifier[i] , identifier[j] ))
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] ):
identifier[y] [ identifier[j] ]= identifier[model] . identifier[addVar] ( identifier[vtype] = literal[string] , identifier[name] = literal[string] % identifier[j] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[x] [ identifier[i] , identifier[j] ] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] ))== literal[int] , literal[string] % identifier[i] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] ):
identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[s] [ identifier[i] ]* identifier[x] [ identifier[i] , identifier[j] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ))<= identifier[B] * identifier[y] [ identifier[j] ], literal[string] % identifier[j] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] ):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[model] . identifier[addCons] ( identifier[x] [ identifier[i] , identifier[j] ]<= identifier[y] [ identifier[j] ], literal[string] %( identifier[i] , identifier[j] ))
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] - literal[int] ):
identifier[model] . identifier[addCons] ( identifier[y] [ identifier[j] ]>= identifier[y] [ identifier[j] + literal[int] ], literal[string] % identifier[j] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[model] . identifier[addConsSOS1] ([ identifier[x] [ identifier[i] , identifier[j] ] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] )])
identifier[model] . identifier[setObjective] ( identifier[quicksum] ( identifier[y] [ identifier[j] ] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[U] )), literal[string] )
identifier[model] . identifier[data] = identifier[x] , identifier[y]
keyword[return] identifier[model]
|
def bpp(s, B):
"""bpp: Martello and Toth's model to solve the bin packing problem.
Parameters:
- s: list with item widths
- B: bin capacity
Returns a model, ready to be solved.
"""
n = len(s)
U = len(FFD(s, B)) # upper bound of the number of bins
model = Model('bpp')
# setParam("MIPFocus",1)
(x, y) = ({}, {})
for i in range(n):
for j in range(U):
x[i, j] = model.addVar(vtype='B', name='x(%s,%s)' % (i, j)) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
for j in range(U):
y[j] = model.addVar(vtype='B', name='y(%s)' % j) # depends on [control=['for'], data=['j']]
# assignment constraints
for i in range(n):
model.addCons(quicksum((x[i, j] for j in range(U))) == 1, 'Assign(%s)' % i) # depends on [control=['for'], data=['i']]
# bin capacity constraints
for j in range(U):
model.addCons(quicksum((s[i] * x[i, j] for i in range(n))) <= B * y[j], 'Capac(%s)' % j) # depends on [control=['for'], data=['j']]
# tighten assignment constraints
for j in range(U):
for i in range(n):
model.addCons(x[i, j] <= y[j], 'Strong(%s,%s)' % (i, j)) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['j']]
# tie breaking constraints
for j in range(U - 1):
model.addCons(y[j] >= y[j + 1], 'TieBrk(%s)' % j) # depends on [control=['for'], data=['j']]
# SOS constraints
for i in range(n):
model.addConsSOS1([x[i, j] for j in range(U)]) # depends on [control=['for'], data=['i']]
model.setObjective(quicksum((y[j] for j in range(U))), 'minimize')
model.data = (x, y)
return model
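A hedged end-to-end sketch: it assumes `bpp` sits in a module built on PySCIPOpt (implied by the Model/quicksum calls), and a minimal first-fit-decreasing FFD is included so the snippet is self-contained; the instance data is made up:

def FFD(s, B):
    # first-fit decreasing: sort by width, place each item in the first bin that fits
    bins = []
    for width in sorted(s, reverse=True):
        for b in bins:
            if sum(b) + width <= B:
                b.append(width)
                break
        else:
            bins.append([width])
    return bins

s = [6, 5, 4, 4, 3, 3, 2, 2, 2]
B = 10
model = bpp(s, B)
model.optimize()
x, y = model.data
print('bins used:', int(round(model.getObjVal())))  # 4 for this instance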
|
def build(_resource, _cache=True, **kwargs):
"""Build a schema from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use cache system.
:rtype: Schema.
"""
    return _SCHEMAFACTORY.build(_resource=_resource, _cache=_cache, **kwargs)  # pass the caller's _cache flag through
|
def function[build, parameter[_resource, _cache]]:
constant[Build a schema from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use cache system.
:rtype: Schema.
]
return[call[name[_SCHEMAFACTORY].build, parameter[]]]
|
keyword[def] identifier[build] ( identifier[_resource] , identifier[_cache] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[_SCHEMAFACTORY] . identifier[build] ( identifier[_resource] = identifier[_resource] , identifier[_cache] = keyword[True] ,** identifier[kwargs] )
|
def build(_resource, _cache=True, **kwargs):
"""Build a schema from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use cache system.
:rtype: Schema.
"""
    return _SCHEMAFACTORY.build(_resource=_resource, _cache=_cache, **kwargs)  # pass the caller's _cache flag through
|
def reversible_deroot(self):
""" Stores info required to restore rootedness to derooted Tree. Returns
the edge that was originally rooted, the length of e1, and the length
of e2.
Dendropy Derooting Process:
In a rooted tree the root node is bifurcating. Derooting makes it
trifurcating.
Call the two edges leading out of the root node e1 and e2.
Derooting with Tree.deroot() deletes one of e1 and e2 (let's say e2),
and stretches the other to the sum of their lengths. Call this e3.
Rooted tree: Derooted tree:
A A B
|_ B \ /
/ |
/e1 |e3 (length = e1+e2; e2 is deleted)
Root--o ===> |
\e2 Root--o _ C
\ _ C |
| D
D
Reverse this with Tree.reroot_at_edge(edge, length1, length2, ...)
"""
root_edge = self._tree.seed_node.edge
lengths = dict([(edge, edge.length) for edge
in self._tree.seed_node.incident_edges() if edge is not root_edge])
self._tree.deroot()
reroot_edge = (set(self._tree.seed_node.incident_edges())
& set(lengths.keys())).pop()
self._tree.encode_bipartitions()
self._dirty = True
return (reroot_edge, reroot_edge.length - lengths[reroot_edge],
lengths[reroot_edge])
|
def function[reversible_deroot, parameter[self]]:
constant[ Stores info required to restore rootedness to derooted Tree. Returns
the edge that was originally rooted, the length of e1, and the length
of e2.
Dendropy Derooting Process:
In a rooted tree the root node is bifurcating. Derooting makes it
trifurcating.
Call the two edges leading out of the root node e1 and e2.
Derooting with Tree.deroot() deletes one of e1 and e2 (let's say e2),
and stretches the other to the sum of their lengths. Call this e3.
Rooted tree: Derooted tree:
A A B
|_ B \ /
/ |
/e1 |e3 (length = e1+e2; e2 is deleted)
Root--o ===> |
\e2 Root--o _ C
\ _ C |
| D
D
Reverse this with Tree.reroot_at_edge(edge, length1, length2, ...)
]
variable[root_edge] assign[=] name[self]._tree.seed_node.edge
variable[lengths] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18dc07ee0>]]
call[name[self]._tree.deroot, parameter[]]
variable[reroot_edge] assign[=] call[binary_operation[call[name[set], parameter[call[name[self]._tree.seed_node.incident_edges, parameter[]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[lengths].keys, parameter[]]]]].pop, parameter[]]
call[name[self]._tree.encode_bipartitions, parameter[]]
name[self]._dirty assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da18dc06170>, <ast.BinOp object at 0x7da18dc04700>, <ast.Subscript object at 0x7da18dc05660>]]]
|
keyword[def] identifier[reversible_deroot] ( identifier[self] ):
literal[string]
identifier[root_edge] = identifier[self] . identifier[_tree] . identifier[seed_node] . identifier[edge]
identifier[lengths] = identifier[dict] ([( identifier[edge] , identifier[edge] . identifier[length] ) keyword[for] identifier[edge]
keyword[in] identifier[self] . identifier[_tree] . identifier[seed_node] . identifier[incident_edges] () keyword[if] identifier[edge] keyword[is] keyword[not] identifier[root_edge] ])
identifier[self] . identifier[_tree] . identifier[deroot] ()
identifier[reroot_edge] =( identifier[set] ( identifier[self] . identifier[_tree] . identifier[seed_node] . identifier[incident_edges] ())
& identifier[set] ( identifier[lengths] . identifier[keys] ())). identifier[pop] ()
identifier[self] . identifier[_tree] . identifier[encode_bipartitions] ()
identifier[self] . identifier[_dirty] = keyword[True]
keyword[return] ( identifier[reroot_edge] , identifier[reroot_edge] . identifier[length] - identifier[lengths] [ identifier[reroot_edge] ],
identifier[lengths] [ identifier[reroot_edge] ])
|
def reversible_deroot(self):
""" Stores info required to restore rootedness to derooted Tree. Returns
the edge that was originally rooted, the length of e1, and the length
of e2.
Dendropy Derooting Process:
In a rooted tree the root node is bifurcating. Derooting makes it
trifurcating.
Call the two edges leading out of the root node e1 and e2.
Derooting with Tree.deroot() deletes one of e1 and e2 (let's say e2),
and stretches the other to the sum of their lengths. Call this e3.
Rooted tree: Derooted tree:
A A B
|_ B \\ /
/ |
/e1 |e3 (length = e1+e2; e2 is deleted)
Root--o ===> |
\\e2 Root--o _ C
\\ _ C |
| D
D
Reverse this with Tree.reroot_at_edge(edge, length1, length2, ...)
"""
root_edge = self._tree.seed_node.edge
lengths = dict([(edge, edge.length) for edge in self._tree.seed_node.incident_edges() if edge is not root_edge])
self._tree.deroot()
reroot_edge = (set(self._tree.seed_node.incident_edges()) & set(lengths.keys())).pop()
self._tree.encode_bipartitions()
self._dirty = True
return (reroot_edge, reroot_edge.length - lengths[reroot_edge], lengths[reroot_edge])
|
def prox_unity_plus(X, step, axis=0):
"""Non-negative projection onto sum=1 along an axis
"""
return prox_unity(prox_plus(X, step), step, axis=axis)
|
def function[prox_unity_plus, parameter[X, step, axis]]:
constant[Non-negative projection onto sum=1 along an axis
]
return[call[name[prox_unity], parameter[call[name[prox_plus], parameter[name[X], name[step]]], name[step]]]]
|
keyword[def] identifier[prox_unity_plus] ( identifier[X] , identifier[step] , identifier[axis] = literal[int] ):
literal[string]
keyword[return] identifier[prox_unity] ( identifier[prox_plus] ( identifier[X] , identifier[step] ), identifier[step] , identifier[axis] = identifier[axis] )
|
def prox_unity_plus(X, step, axis=0):
"""Non-negative projection onto sum=1 along an axis
"""
return prox_unity(prox_plus(X, step), step, axis=axis)
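A self-contained numpy sketch of the composition; these prox_plus/prox_unity bodies are assumptions (non-negative projection, then rescaling slices to sum to 1 along `axis`), not necessarily the package's definitions:

import numpy as np

def prox_plus(X, step):
    return np.maximum(X, 0)                     # project onto the non-negative orthant

def prox_unity(X, step, axis=0):
    return X / X.sum(axis=axis, keepdims=True)  # rescale so slices along `axis` sum to 1

X = np.array([[0.5, -0.2],
              [1.5,  0.8]])
print(prox_unity(prox_plus(X, 1.0), 1.0, axis=0))
# [[0.25 0.  ]
#  [0.75 1.  ]] -- each column is non-negative and sums to 1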
|
def shape(self) -> Tuple[int, int]:
"""Required shape of |NetCDFVariableAgg.array|.
For the default configuration, the first axis corresponds to the
number of devices, and the second one to the number of timesteps.
We show this for the 1-dimensional input sequence |lland_fluxes.NKor|:
>>> from hydpy.core.examples import prepare_io_example_1
>>> nodes, elements = prepare_io_example_1()
>>> from hydpy.core.netcdftools import NetCDFVariableAgg
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=1)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(3, 4)
When using the first axis as the "timeaxis", the order of |tuple|
entries turns:
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=0)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(4, 3)
"""
return self.sort_timeplaceentries(
len(hydpy.pub.timegrids.init), len(self.sequences))
|
def function[shape, parameter[self]]:
constant[Required shape of |NetCDFVariableAgg.array|.
For the default configuration, the first axis corresponds to the
number of devices, and the second one to the number of timesteps.
We show this for the 1-dimensional input sequence |lland_fluxes.NKor|:
>>> from hydpy.core.examples import prepare_io_example_1
>>> nodes, elements = prepare_io_example_1()
>>> from hydpy.core.netcdftools import NetCDFVariableAgg
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=1)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(3, 4)
When using the first axis as the "timeaxis", the order of |tuple|
entries turns:
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=0)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(4, 3)
]
return[call[name[self].sort_timeplaceentries, parameter[call[name[len], parameter[name[hydpy].pub.timegrids.init]], call[name[len], parameter[name[self].sequences]]]]]
|
keyword[def] identifier[shape] ( identifier[self] )-> identifier[Tuple] [ identifier[int] , identifier[int] ]:
literal[string]
keyword[return] identifier[self] . identifier[sort_timeplaceentries] (
identifier[len] ( identifier[hydpy] . identifier[pub] . identifier[timegrids] . identifier[init] ), identifier[len] ( identifier[self] . identifier[sequences] ))
|
def shape(self) -> Tuple[int, int]:
"""Required shape of |NetCDFVariableAgg.array|.
For the default configuration, the first axis corresponds to the
number of devices, and the second one to the number of timesteps.
We show this for the 1-dimensional input sequence |lland_fluxes.NKor|:
>>> from hydpy.core.examples import prepare_io_example_1
>>> nodes, elements = prepare_io_example_1()
>>> from hydpy.core.netcdftools import NetCDFVariableAgg
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=1)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(3, 4)
When using the first axis as the "timeaxis", the order of |tuple|
entries turns:
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=0)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(4, 3)
"""
return self.sort_timeplaceentries(len(hydpy.pub.timegrids.init), len(self.sequences))
|
def get_class_hierarchy(self, base_class):
"""
Walks up the class hierarchy and returns a list of all classes between
base class (inclusive) and java.lang.Object (exclusive).
"""
classes = [base_class]
while classes[-1] is not None and classes[-1] != "java.lang.Object":
classes.append(self.get_superclass(classes[-1]))
return classes[:-1]
|
def function[get_class_hierarchy, parameter[self, base_class]]:
constant[
Walks up the class hierarchy and returns a list of all classes between
base class (inclusive) and java.lang.Object (exclusive).
]
variable[classes] assign[=] list[[<ast.Name object at 0x7da18f00fa60>]]
while <ast.BoolOp object at 0x7da18f00cf70> begin[:]
call[name[classes].append, parameter[call[name[self].get_superclass, parameter[call[name[classes]][<ast.UnaryOp object at 0x7da18f00f400>]]]]]
return[call[name[classes]][<ast.Slice object at 0x7da18f00df90>]]
|
keyword[def] identifier[get_class_hierarchy] ( identifier[self] , identifier[base_class] ):
literal[string]
identifier[classes] =[ identifier[base_class] ]
keyword[while] identifier[classes] [- literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[classes] [- literal[int] ]!= literal[string] :
identifier[classes] . identifier[append] ( identifier[self] . identifier[get_superclass] ( identifier[classes] [- literal[int] ]))
keyword[return] identifier[classes] [:- literal[int] ]
|
def get_class_hierarchy(self, base_class):
"""
Walks up the class hierarchy and returns a list of all classes between
base class (inclusive) and java.lang.Object (exclusive).
"""
classes = [base_class]
while classes[-1] is not None and classes[-1] != 'java.lang.Object':
classes.append(self.get_superclass(classes[-1])) # depends on [control=['while'], data=[]]
return classes[:-1]
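A standalone analogue of the walk above; the dict stands in for get_superclass(), which the original resolves against real Java classes:

SUPER = {'C': 'B', 'B': 'A', 'A': 'java.lang.Object'}  # hypothetical hierarchy

def hierarchy(base):
    classes = [base]
    while classes[-1] is not None and classes[-1] != 'java.lang.Object':
        classes.append(SUPER.get(classes[-1]))
    return classes[:-1]

print(hierarchy('C'))  # ['C', 'B', 'A']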
|
def _verify_jws(self, payload, key):
"""Verify the given JWS payload with the given key and return the payload"""
jws = JWS.from_compact(payload)
try:
alg = jws.signature.combined.alg.name
except KeyError:
msg = 'No alg value found in header'
raise SuspiciousOperation(msg)
if alg != self.OIDC_RP_SIGN_ALGO:
msg = "The provider algorithm {!r} does not match the client's " \
"OIDC_RP_SIGN_ALGO.".format(alg)
raise SuspiciousOperation(msg)
if isinstance(key, six.string_types):
# Use smart_bytes here since the key string comes from settings.
jwk = JWK.load(smart_bytes(key))
else:
# The key is a json returned from the IDP JWKS endpoint.
jwk = JWK.from_json(key)
if not jws.verify(jwk):
msg = 'JWS token verification failed.'
raise SuspiciousOperation(msg)
return jws.payload
|
def function[_verify_jws, parameter[self, payload, key]]:
constant[Verify the given JWS payload with the given key and return the payload]
variable[jws] assign[=] call[name[JWS].from_compact, parameter[name[payload]]]
<ast.Try object at 0x7da18bc72fe0>
if compare[name[alg] not_equal[!=] name[self].OIDC_RP_SIGN_ALGO] begin[:]
variable[msg] assign[=] call[constant[The provider algorithm {!r} does not match the client's OIDC_RP_SIGN_ALGO.].format, parameter[name[alg]]]
<ast.Raise object at 0x7da18bc73520>
if call[name[isinstance], parameter[name[key], name[six].string_types]] begin[:]
variable[jwk] assign[=] call[name[JWK].load, parameter[call[name[smart_bytes], parameter[name[key]]]]]
if <ast.UnaryOp object at 0x7da18bc728f0> begin[:]
variable[msg] assign[=] constant[JWS token verification failed.]
<ast.Raise object at 0x7da18bc73970>
return[name[jws].payload]
|
keyword[def] identifier[_verify_jws] ( identifier[self] , identifier[payload] , identifier[key] ):
literal[string]
identifier[jws] = identifier[JWS] . identifier[from_compact] ( identifier[payload] )
keyword[try] :
identifier[alg] = identifier[jws] . identifier[signature] . identifier[combined] . identifier[alg] . identifier[name]
keyword[except] identifier[KeyError] :
identifier[msg] = literal[string]
keyword[raise] identifier[SuspiciousOperation] ( identifier[msg] )
keyword[if] identifier[alg] != identifier[self] . identifier[OIDC_RP_SIGN_ALGO] :
identifier[msg] = literal[string] literal[string] . identifier[format] ( identifier[alg] )
keyword[raise] identifier[SuspiciousOperation] ( identifier[msg] )
keyword[if] identifier[isinstance] ( identifier[key] , identifier[six] . identifier[string_types] ):
identifier[jwk] = identifier[JWK] . identifier[load] ( identifier[smart_bytes] ( identifier[key] ))
keyword[else] :
identifier[jwk] = identifier[JWK] . identifier[from_json] ( identifier[key] )
keyword[if] keyword[not] identifier[jws] . identifier[verify] ( identifier[jwk] ):
identifier[msg] = literal[string]
keyword[raise] identifier[SuspiciousOperation] ( identifier[msg] )
keyword[return] identifier[jws] . identifier[payload]
|
def _verify_jws(self, payload, key):
"""Verify the given JWS payload with the given key and return the payload"""
jws = JWS.from_compact(payload)
try:
alg = jws.signature.combined.alg.name # depends on [control=['try'], data=[]]
except KeyError:
msg = 'No alg value found in header'
raise SuspiciousOperation(msg) # depends on [control=['except'], data=[]]
if alg != self.OIDC_RP_SIGN_ALGO:
msg = "The provider algorithm {!r} does not match the client's OIDC_RP_SIGN_ALGO.".format(alg)
raise SuspiciousOperation(msg) # depends on [control=['if'], data=['alg']]
if isinstance(key, six.string_types):
# Use smart_bytes here since the key string comes from settings.
jwk = JWK.load(smart_bytes(key)) # depends on [control=['if'], data=[]]
else:
# The key is a json returned from the IDP JWKS endpoint.
jwk = JWK.from_json(key)
if not jws.verify(jwk):
msg = 'JWS token verification failed.'
raise SuspiciousOperation(msg) # depends on [control=['if'], data=[]]
return jws.payload
|
def add_external_reference_to_entity(self,entity_id,ext_ref):
"""
        Adds an external reference to an entity specified by the entity identifier
@param entity_id: the entity identifier
@type entity_id: string
@param ext_ref: the external reference
@type ext_ref: L{CexternalReference}
"""
node_entity = self.map_entity_id_to_node.get(entity_id)
if node_entity is not None:
entity = Centity(node_entity,self.type)
entity.add_external_reference(ext_ref)
else:
            print('Trying to add a reference to the entity', entity_id, 'but it cannot be found in this file', file=sys.stderr)
|
def function[add_external_reference_to_entity, parameter[self, entity_id, ext_ref]]:
constant[
        Adds an external reference to an entity specified by the entity identifier
@param entity_id: the entity identifier
@type entity_id: string
@param ext_ref: the external reference
@type ext_ref: L{CexternalReference}
]
variable[node_entity] assign[=] call[name[self].map_entity_id_to_node.get, parameter[name[entity_id]]]
if compare[name[node_entity] is_not constant[None]] begin[:]
variable[entity] assign[=] call[name[Centity], parameter[name[node_entity], name[self].type]]
call[name[entity].add_external_reference, parameter[name[ext_ref]]]
|
keyword[def] identifier[add_external_reference_to_entity] ( identifier[self] , identifier[entity_id] , identifier[ext_ref] ):
literal[string]
identifier[node_entity] = identifier[self] . identifier[map_entity_id_to_node] . identifier[get] ( identifier[entity_id] )
keyword[if] identifier[node_entity] keyword[is] keyword[not] keyword[None] :
identifier[entity] = identifier[Centity] ( identifier[node_entity] , identifier[self] . identifier[type] )
identifier[entity] . identifier[add_external_reference] ( identifier[ext_ref] )
keyword[else] :
identifier[print] >> identifier[sys] . identifier[stderr] , literal[string] , identifier[entity_id] , literal[string]
|
def add_external_reference_to_entity(self, entity_id, ext_ref):
"""
    Adds an external reference to an entity specified by the entity identifier
@param entity_id: the entity identifier
@type entity_id: string
@param ext_ref: the external reference
@type ext_ref: L{CexternalReference}
"""
node_entity = self.map_entity_id_to_node.get(entity_id)
if node_entity is not None:
entity = Centity(node_entity, self.type)
entity.add_external_reference(ext_ref) # depends on [control=['if'], data=['node_entity']]
else:
        print('Trying to add a reference to the entity', entity_id, 'but cannot be found in this file', file=sys.stderr)
|
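A self-contained sketch of the lookup-then-wrap pattern above; CentityStub is a hypothetical stand-in for the real Centity wrapper, and the entity data is made up.

import sys

class CentityStub:  # hypothetical stand-in for the real Centity wrapper
    def __init__(self, node, node_type):
        self.node, self.type = node, node_type
    def add_external_reference(self, ext_ref):
        self.node.setdefault('externalReferences', []).append(ext_ref)

map_entity_id_to_node = {'e1': {}}
node_entity = map_entity_id_to_node.get('e1')
if node_entity is not None:
    CentityStub(node_entity, 'entities').add_external_reference({'resource': 'wordnet'})
else:
    print('Trying to add a reference to the entity', 'e1',
          'but cannot be found in this file', file=sys.stderr)
print(map_entity_id_to_node)  # {'e1': {'externalReferences': [{'resource': 'wordnet'}]}}
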
def encoded(self):
"""
Returns an encoded form of the query
"""
if not self._encoded:
self._encoded = self._encode()
if self._base_str:
return '&'.join((self._base_str, self._encoded))
else:
return self._encoded
|
def function[encoded, parameter[self]]:
constant[
Returns an encoded form of the query
]
if <ast.UnaryOp object at 0x7da2054a6a70> begin[:]
name[self]._encoded assign[=] call[name[self]._encode, parameter[]]
if name[self]._base_str begin[:]
return[call[constant[&].join, parameter[tuple[[<ast.Attribute object at 0x7da2054a79d0>, <ast.Attribute object at 0x7da2054a5c90>]]]]]
|
keyword[def] identifier[encoded] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_encoded] :
identifier[self] . identifier[_encoded] = identifier[self] . identifier[_encode] ()
keyword[if] identifier[self] . identifier[_base_str] :
keyword[return] literal[string] . identifier[join] (( identifier[self] . identifier[_base_str] , identifier[self] . identifier[_encoded] ))
keyword[else] :
keyword[return] identifier[self] . identifier[_encoded]
|
def encoded(self):
"""
Returns an encoded form of the query
"""
if not self._encoded:
self._encoded = self._encode() # depends on [control=['if'], data=[]]
if self._base_str:
return '&'.join((self._base_str, self._encoded)) # depends on [control=['if'], data=[]]
else:
return self._encoded
|
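A runnable sketch of the encode-once-then-cache idiom above; the Query stub and its urlencode-based _encode are illustrative assumptions, not the original class.

from urllib.parse import urlencode

class Query:
    def __init__(self, params, base_str=''):
        self._params, self._base_str, self._encoded = params, base_str, None
    def _encode(self):
        return urlencode(self._params)
    def encoded(self):
        # Encode once, then serve the cached result on later calls.
        if not self._encoded:
            self._encoded = self._encode()
        if self._base_str:
            return '&'.join((self._base_str, self._encoded))
        return self._encoded

q = Query({'q': 'beer'}, base_str='pretty=true')
print(q.encoded())  # pretty=true&q=beer
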
def user_organisations_resource(doc):
"""Get user.organisations subresouces"""
if doc.get('type') == 'user':
for org_id, resource in doc.get('organisations', {}).items():
resource['id'] = org_id
resource['user_id'] = doc['_id']
yield [doc['_id'], org_id], resource
|
def function[user_organisations_resource, parameter[doc]]:
    constant[Get user.organisations subresources]
if compare[call[name[doc].get, parameter[constant[type]]] equal[==] constant[user]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2041d9b10>, <ast.Name object at 0x7da2041d8550>]]] in starred[call[call[name[doc].get, parameter[constant[organisations], dictionary[[], []]]].items, parameter[]]] begin[:]
call[name[resource]][constant[id]] assign[=] name[org_id]
call[name[resource]][constant[user_id]] assign[=] call[name[doc]][constant[_id]]
<ast.Yield object at 0x7da2041db6d0>
|
keyword[def] identifier[user_organisations_resource] ( identifier[doc] ):
literal[string]
keyword[if] identifier[doc] . identifier[get] ( literal[string] )== literal[string] :
keyword[for] identifier[org_id] , identifier[resource] keyword[in] identifier[doc] . identifier[get] ( literal[string] ,{}). identifier[items] ():
identifier[resource] [ literal[string] ]= identifier[org_id]
identifier[resource] [ literal[string] ]= identifier[doc] [ literal[string] ]
keyword[yield] [ identifier[doc] [ literal[string] ], identifier[org_id] ], identifier[resource]
|
def user_organisations_resource(doc):
"""Get user.organisations subresouces"""
if doc.get('type') == 'user':
for (org_id, resource) in doc.get('organisations', {}).items():
resource['id'] = org_id
resource['user_id'] = doc['_id']
yield ([doc['_id'], org_id], resource) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
|
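Assuming the user_organisations_resource generator above is in scope, a sample document shows what it yields; the identifiers are made up.

doc = {'_id': 'u1', 'type': 'user',
       'organisations': {'o1': {'role': 'admin'}}}
for key, resource in user_organisations_resource(doc):
    print(key, resource)
# ['u1', 'o1'] {'role': 'admin', 'id': 'o1', 'user_id': 'u1'}
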
def command(func):
'''
A decorator to create a function with docopt arguments. It also generates a help function
@command
def do_myfunc(self, args):
""" docopts text """
pass
will create
def do_myfunc(self, args, arguments):
""" docopts text """
...
def help_myfunc(self, args, arguments):
... prints the docopt text ...
:param func: the function for the decorator
'''
classname = inspect.getouterframes(inspect.currentframe())[1][3]
name = func.__name__
help_name = name.replace("do_", "help_")
doc = textwrap.dedent(func.__doc__)
def new(instance, args):
# instance.new.__doc__ = doc
try:
argv = shlex.split(args)
arguments = docopt(doc, help=True, argv=argv)
func(instance, args, arguments)
except SystemExit:
if args not in ('-h', '--help'):
Console.error("Could not execute the command.")
print(doc)
new.__doc__ = doc
return new
|
def function[command, parameter[func]]:
constant[
A decorator to create a function with docopt arguments. It also generates a help function
@command
def do_myfunc(self, args):
""" docopts text """
pass
will create
def do_myfunc(self, args, arguments):
""" docopts text """
...
def help_myfunc(self, args, arguments):
... prints the docopt text ...
:param func: the function for the decorator
]
variable[classname] assign[=] call[call[call[name[inspect].getouterframes, parameter[call[name[inspect].currentframe, parameter[]]]]][constant[1]]][constant[3]]
variable[name] assign[=] name[func].__name__
variable[help_name] assign[=] call[name[name].replace, parameter[constant[do_], constant[help_]]]
variable[doc] assign[=] call[name[textwrap].dedent, parameter[name[func].__doc__]]
def function[new, parameter[instance, args]]:
<ast.Try object at 0x7da20c7c9390>
name[new].__doc__ assign[=] name[doc]
return[name[new]]
|
keyword[def] identifier[command] ( identifier[func] ):
literal[string]
identifier[classname] = identifier[inspect] . identifier[getouterframes] ( identifier[inspect] . identifier[currentframe] ())[ literal[int] ][ literal[int] ]
identifier[name] = identifier[func] . identifier[__name__]
identifier[help_name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
identifier[doc] = identifier[textwrap] . identifier[dedent] ( identifier[func] . identifier[__doc__] )
keyword[def] identifier[new] ( identifier[instance] , identifier[args] ):
keyword[try] :
identifier[argv] = identifier[shlex] . identifier[split] ( identifier[args] )
identifier[arguments] = identifier[docopt] ( identifier[doc] , identifier[help] = keyword[True] , identifier[argv] = identifier[argv] )
identifier[func] ( identifier[instance] , identifier[args] , identifier[arguments] )
keyword[except] identifier[SystemExit] :
keyword[if] identifier[args] keyword[not] keyword[in] ( literal[string] , literal[string] ):
identifier[Console] . identifier[error] ( literal[string] )
identifier[print] ( identifier[doc] )
identifier[new] . identifier[__doc__] = identifier[doc]
keyword[return] identifier[new]
|
def command(func):
'''
A decorator to create a function with docopt arguments. It also generates a help function
@command
def do_myfunc(self, args):
""" docopts text """
pass
will create
def do_myfunc(self, args, arguments):
""" docopts text """
...
def help_myfunc(self, args, arguments):
... prints the docopt text ...
:param func: the function for the decorator
'''
classname = inspect.getouterframes(inspect.currentframe())[1][3]
name = func.__name__
help_name = name.replace('do_', 'help_')
doc = textwrap.dedent(func.__doc__)
def new(instance, args):
# instance.new.__doc__ = doc
try:
argv = shlex.split(args)
arguments = docopt(doc, help=True, argv=argv)
func(instance, args, arguments) # depends on [control=['try'], data=[]]
except SystemExit:
if args not in ('-h', '--help'):
Console.error('Could not execute the command.') # depends on [control=['if'], data=[]]
print(doc) # depends on [control=['except'], data=[]]
new.__doc__ = doc
return new
|
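A usage sketch for the command decorator above, assuming docopt is installed and the decorator (with its inspect/shlex/textwrap imports) is in scope; the Shell class and hello command are hypothetical.

class Shell:
    @command
    def do_hello(self, args, arguments):
        """
        Usage:
            hello NAME
        """
        print('Hello,', arguments['NAME'])

Shell().do_hello('world')   # Hello, world
Shell().do_hello('--help')  # prints the usage text (docopt exits, the wrapper catches it)
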
def prep(config=None, path=None):
"""Prepare to read the configuration information."""
if config is None:
config = parse()
if path is None:
path = os.getcwd()
root = config.get('root', 'path')
root = os.path.join(path, root)
root = os.path.realpath(root)
os.environ['SCIDASH_HOME'] = root
if sys.path[0] != root:
sys.path.insert(0, root)
|
def function[prep, parameter[config, path]]:
constant[Prepare to read the configuration information.]
if compare[name[config] is constant[None]] begin[:]
variable[config] assign[=] call[name[parse], parameter[]]
if compare[name[path] is constant[None]] begin[:]
variable[path] assign[=] call[name[os].getcwd, parameter[]]
variable[root] assign[=] call[name[config].get, parameter[constant[root], constant[path]]]
variable[root] assign[=] call[name[os].path.join, parameter[name[path], name[root]]]
variable[root] assign[=] call[name[os].path.realpath, parameter[name[root]]]
call[name[os].environ][constant[SCIDASH_HOME]] assign[=] name[root]
if compare[call[name[sys].path][constant[0]] not_equal[!=] name[root]] begin[:]
call[name[sys].path.insert, parameter[constant[0], name[root]]]
|
keyword[def] identifier[prep] ( identifier[config] = keyword[None] , identifier[path] = keyword[None] ):
literal[string]
keyword[if] identifier[config] keyword[is] keyword[None] :
identifier[config] = identifier[parse] ()
keyword[if] identifier[path] keyword[is] keyword[None] :
identifier[path] = identifier[os] . identifier[getcwd] ()
identifier[root] = identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[root] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[root] )
identifier[root] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[root] )
identifier[os] . identifier[environ] [ literal[string] ]= identifier[root]
keyword[if] identifier[sys] . identifier[path] [ literal[int] ]!= identifier[root] :
identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , identifier[root] )
|
def prep(config=None, path=None):
"""Prepare to read the configuration information."""
if config is None:
config = parse() # depends on [control=['if'], data=['config']]
if path is None:
path = os.getcwd() # depends on [control=['if'], data=['path']]
root = config.get('root', 'path')
root = os.path.join(path, root)
root = os.path.realpath(root)
os.environ['SCIDASH_HOME'] = root
if sys.path[0] != root:
sys.path.insert(0, root) # depends on [control=['if'], data=['root']]
|
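A small sketch exercising prep above with an in-memory configparser object standing in for the parsed config; the 'root' section and 'path' option mirror what the function reads.

import configparser
import os
import sys

config = configparser.ConfigParser()
config['root'] = {'path': '.'}  # stand-in for the real scidash config
prep(config=config, path=os.getcwd())
print(os.environ['SCIDASH_HOME'])                 # the resolved project root
print(sys.path[0] == os.environ['SCIDASH_HOME'])  # True
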
def safe_serialize_type(l):
'''serialize only with letters, numbers and _'''
if isinstance(l, str):
return l
elif isinstance(l, list):
return '%s_%s_' % (l[0], ''.join(map(safe_serialize_type, l[1:])))
else:
return str(l)
|
def function[safe_serialize_type, parameter[l]]:
constant[serialize only with letters, numbers and _]
if call[name[isinstance], parameter[name[l], name[str]]] begin[:]
return[name[l]]
|
keyword[def] identifier[safe_serialize_type] ( identifier[l] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[l] , identifier[str] ):
keyword[return] identifier[l]
keyword[elif] identifier[isinstance] ( identifier[l] , identifier[list] ):
keyword[return] literal[string] %( identifier[l] [ literal[int] ], literal[string] . identifier[join] ( identifier[map] ( identifier[safe_serialize_type] , identifier[l] [ literal[int] :])))
keyword[else] :
keyword[return] identifier[str] ( identifier[l] )
|
def safe_serialize_type(l):
"""serialize only with letters, numbers and _"""
if isinstance(l, str):
return l # depends on [control=['if'], data=[]]
elif isinstance(l, list):
return '%s_%s_' % (l[0], ''.join(map(safe_serialize_type, l[1:]))) # depends on [control=['if'], data=[]]
else:
return str(l)
|
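Assuming safe_serialize_type above is in scope, nested type descriptions flatten like this:

print(safe_serialize_type('int'))                            # int
print(safe_serialize_type(['list', 'int']))                  # list_int_
print(safe_serialize_type(['map', 'str', ['list', 'int']]))  # map_strlist_int__
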
def keys(self, key=None, reverse=False):
"""sort the keys before returning them"""
ks = sorted(list(dict.keys(self)), key=key, reverse=reverse)
return ks
|
def function[keys, parameter[self, key, reverse]]:
constant[sort the keys before returning them]
variable[ks] assign[=] call[name[sorted], parameter[call[name[list], parameter[call[name[dict].keys, parameter[name[self]]]]]]]
return[name[ks]]
|
keyword[def] identifier[keys] ( identifier[self] , identifier[key] = keyword[None] , identifier[reverse] = keyword[False] ):
literal[string]
identifier[ks] = identifier[sorted] ( identifier[list] ( identifier[dict] . identifier[keys] ( identifier[self] )), identifier[key] = identifier[key] , identifier[reverse] = identifier[reverse] )
keyword[return] identifier[ks]
|
def keys(self, key=None, reverse=False):
"""sort the keys before returning them"""
ks = sorted(list(dict.keys(self)), key=key, reverse=reverse)
return ks
|
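The keys method above is meant to live on a dict subclass; a minimal illustrative wrapper:

class SortedKeyDict(dict):
    def keys(self, key=None, reverse=False):
        """sort the keys before returning them"""
        return sorted(dict.keys(self), key=key, reverse=reverse)

d = SortedKeyDict(b=2, a=1, c=3)
print(d.keys())               # ['a', 'b', 'c']
print(d.keys(reverse=True))   # ['c', 'b', 'a']
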
def list(self, **params):
"""
Retrieve visit outcomes
Returns Visit Outcomes, according to the parameters provided
:calls: ``get /visit_outcomes``
:param dict params: (optional) Search options.
    :return: List of dictionaries that support attribute-style access, which represent a collection of VisitOutcomes.
:rtype: list
"""
_, _, visit_outcomes = self.http_client.get("/visit_outcomes", params=params)
return visit_outcomes
|
def function[list, parameter[self]]:
constant[
Retrieve visit outcomes
Returns Visit Outcomes, according to the parameters provided
:calls: ``get /visit_outcomes``
:param dict params: (optional) Search options.
    :return: List of dictionaries that support attribute-style access, which represent a collection of VisitOutcomes.
:rtype: list
]
<ast.Tuple object at 0x7da20c76d420> assign[=] call[name[self].http_client.get, parameter[constant[/visit_outcomes]]]
return[name[visit_outcomes]]
|
keyword[def] identifier[list] ( identifier[self] ,** identifier[params] ):
literal[string]
identifier[_] , identifier[_] , identifier[visit_outcomes] = identifier[self] . identifier[http_client] . identifier[get] ( literal[string] , identifier[params] = identifier[params] )
keyword[return] identifier[visit_outcomes]
|
def list(self, **params):
"""
Retrieve visit outcomes
Returns Visit Outcomes, according to the parameters provided
:calls: ``get /visit_outcomes``
:param dict params: (optional) Search options.
    :return: List of dictionaries that support attribute-style access, which represent a collection of VisitOutcomes.
:rtype: list
"""
(_, _, visit_outcomes) = self.http_client.get('/visit_outcomes', params=params)
return visit_outcomes
|
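The (status, headers, payload) triple-unpacking pattern above, sketched with a hypothetical stub HTTP client and made-up payload.

class StubHTTPClient:
    def get(self, path, params=None):
        # (status, headers, payload), matching the triple unpacked above
        return 200, {}, [{'id': 1, 'name': 'No Answer'}]

_, _, visit_outcomes = StubHTTPClient().get('/visit_outcomes', params={'page': 1})
print(visit_outcomes)  # [{'id': 1, 'name': 'No Answer'}]
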
def _match_one(self, rec, tests):
"""Check if a specific record matches tests."""
for key,test in tests.iteritems():
if not test(rec.get(key, None)):
return False
return True
|
def function[_match_one, parameter[self, rec, tests]]:
constant[Check if a specific record matches tests.]
for taget[tuple[[<ast.Name object at 0x7da18ede70a0>, <ast.Name object at 0x7da18ede74f0>]]] in starred[call[name[tests].iteritems, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da18ede7520> begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[_match_one] ( identifier[self] , identifier[rec] , identifier[tests] ):
literal[string]
keyword[for] identifier[key] , identifier[test] keyword[in] identifier[tests] . identifier[iteritems] ():
keyword[if] keyword[not] identifier[test] ( identifier[rec] . identifier[get] ( identifier[key] , keyword[None] )):
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def _match_one(self, rec, tests):
"""Check if a specific record matches tests."""
for (key, test) in tests.iteritems():
if not test(rec.get(key, None)):
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return True
|
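The same all-predicates-must-pass logic, inlined with Python 3's items() in place of iteritems(); the record and tests are illustrative.

rec = {'name': 'alice', 'age': 30}
tests = {'name': lambda v: v == 'alice',
         'age': lambda v: v is not None and v >= 18,
         'email': lambda v: v is None}  # absent keys arrive as None

matched = all(test(rec.get(key)) for key, test in tests.items())
print(matched)  # True
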
def solar_position_loop(unixtime, loc_args, out):
"""Loop through the time array and calculate the solar position"""
lat = loc_args[0]
lon = loc_args[1]
elev = loc_args[2]
pressure = loc_args[3]
temp = loc_args[4]
delta_t = loc_args[5]
atmos_refract = loc_args[6]
sst = loc_args[7]
esd = loc_args[8]
for i in range(unixtime.shape[0]):
utime = unixtime[i]
jd = julian_day(utime)
jde = julian_ephemeris_day(jd, delta_t)
jc = julian_century(jd)
jce = julian_ephemeris_century(jde)
jme = julian_ephemeris_millennium(jce)
R = heliocentric_radius_vector(jme)
if esd:
out[0, i] = R
continue
L = heliocentric_longitude(jme)
B = heliocentric_latitude(jme)
Theta = geocentric_longitude(L)
beta = geocentric_latitude(B)
x0 = mean_elongation(jce)
x1 = mean_anomaly_sun(jce)
x2 = mean_anomaly_moon(jce)
x3 = moon_argument_latitude(jce)
x4 = moon_ascending_longitude(jce)
delta_psi = longitude_nutation(jce, x0, x1, x2, x3, x4)
delta_epsilon = obliquity_nutation(jce, x0, x1, x2, x3, x4)
epsilon0 = mean_ecliptic_obliquity(jme)
epsilon = true_ecliptic_obliquity(epsilon0, delta_epsilon)
delta_tau = aberration_correction(R)
lamd = apparent_sun_longitude(Theta, delta_psi, delta_tau)
v0 = mean_sidereal_time(jd, jc)
v = apparent_sidereal_time(v0, delta_psi, epsilon)
alpha = geocentric_sun_right_ascension(lamd, epsilon, beta)
delta = geocentric_sun_declination(lamd, epsilon, beta)
if sst:
out[0, i] = v
out[1, i] = alpha
out[2, i] = delta
continue
m = sun_mean_longitude(jme)
eot = equation_of_time(m, alpha, delta_psi, epsilon)
H = local_hour_angle(v, lon, alpha)
xi = equatorial_horizontal_parallax(R)
u = uterm(lat)
x = xterm(u, lat, elev)
y = yterm(u, lat, elev)
delta_alpha = parallax_sun_right_ascension(x, xi, H, delta)
delta_prime = topocentric_sun_declination(delta, x, y, xi, delta_alpha,
H)
H_prime = topocentric_local_hour_angle(H, delta_alpha)
e0 = topocentric_elevation_angle_without_atmosphere(lat, delta_prime,
H_prime)
delta_e = atmospheric_refraction_correction(pressure, temp, e0,
atmos_refract)
e = topocentric_elevation_angle(e0, delta_e)
theta = topocentric_zenith_angle(e)
theta0 = topocentric_zenith_angle(e0)
gamma = topocentric_astronomers_azimuth(H_prime, delta_prime, lat)
phi = topocentric_azimuth_angle(gamma)
out[0, i] = theta
out[1, i] = theta0
out[2, i] = e
out[3, i] = e0
out[4, i] = phi
out[5, i] = eot
|
def function[solar_position_loop, parameter[unixtime, loc_args, out]]:
constant[Loop through the time array and calculate the solar position]
variable[lat] assign[=] call[name[loc_args]][constant[0]]
variable[lon] assign[=] call[name[loc_args]][constant[1]]
variable[elev] assign[=] call[name[loc_args]][constant[2]]
variable[pressure] assign[=] call[name[loc_args]][constant[3]]
variable[temp] assign[=] call[name[loc_args]][constant[4]]
variable[delta_t] assign[=] call[name[loc_args]][constant[5]]
variable[atmos_refract] assign[=] call[name[loc_args]][constant[6]]
variable[sst] assign[=] call[name[loc_args]][constant[7]]
variable[esd] assign[=] call[name[loc_args]][constant[8]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[unixtime].shape][constant[0]]]]] begin[:]
variable[utime] assign[=] call[name[unixtime]][name[i]]
variable[jd] assign[=] call[name[julian_day], parameter[name[utime]]]
variable[jde] assign[=] call[name[julian_ephemeris_day], parameter[name[jd], name[delta_t]]]
variable[jc] assign[=] call[name[julian_century], parameter[name[jd]]]
variable[jce] assign[=] call[name[julian_ephemeris_century], parameter[name[jde]]]
variable[jme] assign[=] call[name[julian_ephemeris_millennium], parameter[name[jce]]]
variable[R] assign[=] call[name[heliocentric_radius_vector], parameter[name[jme]]]
if name[esd] begin[:]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b1a65e10>, <ast.Name object at 0x7da1b1a66350>]]] assign[=] name[R]
continue
variable[L] assign[=] call[name[heliocentric_longitude], parameter[name[jme]]]
variable[B] assign[=] call[name[heliocentric_latitude], parameter[name[jme]]]
variable[Theta] assign[=] call[name[geocentric_longitude], parameter[name[L]]]
variable[beta] assign[=] call[name[geocentric_latitude], parameter[name[B]]]
variable[x0] assign[=] call[name[mean_elongation], parameter[name[jce]]]
variable[x1] assign[=] call[name[mean_anomaly_sun], parameter[name[jce]]]
variable[x2] assign[=] call[name[mean_anomaly_moon], parameter[name[jce]]]
variable[x3] assign[=] call[name[moon_argument_latitude], parameter[name[jce]]]
variable[x4] assign[=] call[name[moon_ascending_longitude], parameter[name[jce]]]
variable[delta_psi] assign[=] call[name[longitude_nutation], parameter[name[jce], name[x0], name[x1], name[x2], name[x3], name[x4]]]
variable[delta_epsilon] assign[=] call[name[obliquity_nutation], parameter[name[jce], name[x0], name[x1], name[x2], name[x3], name[x4]]]
variable[epsilon0] assign[=] call[name[mean_ecliptic_obliquity], parameter[name[jme]]]
variable[epsilon] assign[=] call[name[true_ecliptic_obliquity], parameter[name[epsilon0], name[delta_epsilon]]]
variable[delta_tau] assign[=] call[name[aberration_correction], parameter[name[R]]]
variable[lamd] assign[=] call[name[apparent_sun_longitude], parameter[name[Theta], name[delta_psi], name[delta_tau]]]
variable[v0] assign[=] call[name[mean_sidereal_time], parameter[name[jd], name[jc]]]
variable[v] assign[=] call[name[apparent_sidereal_time], parameter[name[v0], name[delta_psi], name[epsilon]]]
variable[alpha] assign[=] call[name[geocentric_sun_right_ascension], parameter[name[lamd], name[epsilon], name[beta]]]
variable[delta] assign[=] call[name[geocentric_sun_declination], parameter[name[lamd], name[epsilon], name[beta]]]
if name[sst] begin[:]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26adff0>, <ast.Name object at 0x7da1b26ae830>]]] assign[=] name[v]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26ae0e0>, <ast.Name object at 0x7da1b26ad870>]]] assign[=] name[alpha]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26aed40>, <ast.Name object at 0x7da1b26afa90>]]] assign[=] name[delta]
continue
variable[m] assign[=] call[name[sun_mean_longitude], parameter[name[jme]]]
variable[eot] assign[=] call[name[equation_of_time], parameter[name[m], name[alpha], name[delta_psi], name[epsilon]]]
variable[H] assign[=] call[name[local_hour_angle], parameter[name[v], name[lon], name[alpha]]]
variable[xi] assign[=] call[name[equatorial_horizontal_parallax], parameter[name[R]]]
variable[u] assign[=] call[name[uterm], parameter[name[lat]]]
variable[x] assign[=] call[name[xterm], parameter[name[u], name[lat], name[elev]]]
variable[y] assign[=] call[name[yterm], parameter[name[u], name[lat], name[elev]]]
variable[delta_alpha] assign[=] call[name[parallax_sun_right_ascension], parameter[name[x], name[xi], name[H], name[delta]]]
variable[delta_prime] assign[=] call[name[topocentric_sun_declination], parameter[name[delta], name[x], name[y], name[xi], name[delta_alpha], name[H]]]
variable[H_prime] assign[=] call[name[topocentric_local_hour_angle], parameter[name[H], name[delta_alpha]]]
variable[e0] assign[=] call[name[topocentric_elevation_angle_without_atmosphere], parameter[name[lat], name[delta_prime], name[H_prime]]]
variable[delta_e] assign[=] call[name[atmospheric_refraction_correction], parameter[name[pressure], name[temp], name[e0], name[atmos_refract]]]
variable[e] assign[=] call[name[topocentric_elevation_angle], parameter[name[e0], name[delta_e]]]
variable[theta] assign[=] call[name[topocentric_zenith_angle], parameter[name[e]]]
variable[theta0] assign[=] call[name[topocentric_zenith_angle], parameter[name[e0]]]
variable[gamma] assign[=] call[name[topocentric_astronomers_azimuth], parameter[name[H_prime], name[delta_prime], name[lat]]]
variable[phi] assign[=] call[name[topocentric_azimuth_angle], parameter[name[gamma]]]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26adab0>, <ast.Name object at 0x7da1b26af6d0>]]] assign[=] name[theta]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26adc30>, <ast.Name object at 0x7da1b26ae500>]]] assign[=] name[theta0]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26acc40>, <ast.Name object at 0x7da1b26ae1d0>]]] assign[=] name[e]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26ad660>, <ast.Name object at 0x7da1b26acfa0>]]] assign[=] name[e0]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26afd00>, <ast.Name object at 0x7da1b26ad900>]]] assign[=] name[phi]
call[name[out]][tuple[[<ast.Constant object at 0x7da1b26ac070>, <ast.Name object at 0x7da1b26ac400>]]] assign[=] name[eot]
|
keyword[def] identifier[solar_position_loop] ( identifier[unixtime] , identifier[loc_args] , identifier[out] ):
literal[string]
identifier[lat] = identifier[loc_args] [ literal[int] ]
identifier[lon] = identifier[loc_args] [ literal[int] ]
identifier[elev] = identifier[loc_args] [ literal[int] ]
identifier[pressure] = identifier[loc_args] [ literal[int] ]
identifier[temp] = identifier[loc_args] [ literal[int] ]
identifier[delta_t] = identifier[loc_args] [ literal[int] ]
identifier[atmos_refract] = identifier[loc_args] [ literal[int] ]
identifier[sst] = identifier[loc_args] [ literal[int] ]
identifier[esd] = identifier[loc_args] [ literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[unixtime] . identifier[shape] [ literal[int] ]):
identifier[utime] = identifier[unixtime] [ identifier[i] ]
identifier[jd] = identifier[julian_day] ( identifier[utime] )
identifier[jde] = identifier[julian_ephemeris_day] ( identifier[jd] , identifier[delta_t] )
identifier[jc] = identifier[julian_century] ( identifier[jd] )
identifier[jce] = identifier[julian_ephemeris_century] ( identifier[jde] )
identifier[jme] = identifier[julian_ephemeris_millennium] ( identifier[jce] )
identifier[R] = identifier[heliocentric_radius_vector] ( identifier[jme] )
keyword[if] identifier[esd] :
identifier[out] [ literal[int] , identifier[i] ]= identifier[R]
keyword[continue]
identifier[L] = identifier[heliocentric_longitude] ( identifier[jme] )
identifier[B] = identifier[heliocentric_latitude] ( identifier[jme] )
identifier[Theta] = identifier[geocentric_longitude] ( identifier[L] )
identifier[beta] = identifier[geocentric_latitude] ( identifier[B] )
identifier[x0] = identifier[mean_elongation] ( identifier[jce] )
identifier[x1] = identifier[mean_anomaly_sun] ( identifier[jce] )
identifier[x2] = identifier[mean_anomaly_moon] ( identifier[jce] )
identifier[x3] = identifier[moon_argument_latitude] ( identifier[jce] )
identifier[x4] = identifier[moon_ascending_longitude] ( identifier[jce] )
identifier[delta_psi] = identifier[longitude_nutation] ( identifier[jce] , identifier[x0] , identifier[x1] , identifier[x2] , identifier[x3] , identifier[x4] )
identifier[delta_epsilon] = identifier[obliquity_nutation] ( identifier[jce] , identifier[x0] , identifier[x1] , identifier[x2] , identifier[x3] , identifier[x4] )
identifier[epsilon0] = identifier[mean_ecliptic_obliquity] ( identifier[jme] )
identifier[epsilon] = identifier[true_ecliptic_obliquity] ( identifier[epsilon0] , identifier[delta_epsilon] )
identifier[delta_tau] = identifier[aberration_correction] ( identifier[R] )
identifier[lamd] = identifier[apparent_sun_longitude] ( identifier[Theta] , identifier[delta_psi] , identifier[delta_tau] )
identifier[v0] = identifier[mean_sidereal_time] ( identifier[jd] , identifier[jc] )
identifier[v] = identifier[apparent_sidereal_time] ( identifier[v0] , identifier[delta_psi] , identifier[epsilon] )
identifier[alpha] = identifier[geocentric_sun_right_ascension] ( identifier[lamd] , identifier[epsilon] , identifier[beta] )
identifier[delta] = identifier[geocentric_sun_declination] ( identifier[lamd] , identifier[epsilon] , identifier[beta] )
keyword[if] identifier[sst] :
identifier[out] [ literal[int] , identifier[i] ]= identifier[v]
identifier[out] [ literal[int] , identifier[i] ]= identifier[alpha]
identifier[out] [ literal[int] , identifier[i] ]= identifier[delta]
keyword[continue]
identifier[m] = identifier[sun_mean_longitude] ( identifier[jme] )
identifier[eot] = identifier[equation_of_time] ( identifier[m] , identifier[alpha] , identifier[delta_psi] , identifier[epsilon] )
identifier[H] = identifier[local_hour_angle] ( identifier[v] , identifier[lon] , identifier[alpha] )
identifier[xi] = identifier[equatorial_horizontal_parallax] ( identifier[R] )
identifier[u] = identifier[uterm] ( identifier[lat] )
identifier[x] = identifier[xterm] ( identifier[u] , identifier[lat] , identifier[elev] )
identifier[y] = identifier[yterm] ( identifier[u] , identifier[lat] , identifier[elev] )
identifier[delta_alpha] = identifier[parallax_sun_right_ascension] ( identifier[x] , identifier[xi] , identifier[H] , identifier[delta] )
identifier[delta_prime] = identifier[topocentric_sun_declination] ( identifier[delta] , identifier[x] , identifier[y] , identifier[xi] , identifier[delta_alpha] ,
identifier[H] )
identifier[H_prime] = identifier[topocentric_local_hour_angle] ( identifier[H] , identifier[delta_alpha] )
identifier[e0] = identifier[topocentric_elevation_angle_without_atmosphere] ( identifier[lat] , identifier[delta_prime] ,
identifier[H_prime] )
identifier[delta_e] = identifier[atmospheric_refraction_correction] ( identifier[pressure] , identifier[temp] , identifier[e0] ,
identifier[atmos_refract] )
identifier[e] = identifier[topocentric_elevation_angle] ( identifier[e0] , identifier[delta_e] )
identifier[theta] = identifier[topocentric_zenith_angle] ( identifier[e] )
identifier[theta0] = identifier[topocentric_zenith_angle] ( identifier[e0] )
identifier[gamma] = identifier[topocentric_astronomers_azimuth] ( identifier[H_prime] , identifier[delta_prime] , identifier[lat] )
identifier[phi] = identifier[topocentric_azimuth_angle] ( identifier[gamma] )
identifier[out] [ literal[int] , identifier[i] ]= identifier[theta]
identifier[out] [ literal[int] , identifier[i] ]= identifier[theta0]
identifier[out] [ literal[int] , identifier[i] ]= identifier[e]
identifier[out] [ literal[int] , identifier[i] ]= identifier[e0]
identifier[out] [ literal[int] , identifier[i] ]= identifier[phi]
identifier[out] [ literal[int] , identifier[i] ]= identifier[eot]
|
def solar_position_loop(unixtime, loc_args, out):
"""Loop through the time array and calculate the solar position"""
lat = loc_args[0]
lon = loc_args[1]
elev = loc_args[2]
pressure = loc_args[3]
temp = loc_args[4]
delta_t = loc_args[5]
atmos_refract = loc_args[6]
sst = loc_args[7]
esd = loc_args[8]
for i in range(unixtime.shape[0]):
utime = unixtime[i]
jd = julian_day(utime)
jde = julian_ephemeris_day(jd, delta_t)
jc = julian_century(jd)
jce = julian_ephemeris_century(jde)
jme = julian_ephemeris_millennium(jce)
R = heliocentric_radius_vector(jme)
if esd:
out[0, i] = R
continue # depends on [control=['if'], data=[]]
L = heliocentric_longitude(jme)
B = heliocentric_latitude(jme)
Theta = geocentric_longitude(L)
beta = geocentric_latitude(B)
x0 = mean_elongation(jce)
x1 = mean_anomaly_sun(jce)
x2 = mean_anomaly_moon(jce)
x3 = moon_argument_latitude(jce)
x4 = moon_ascending_longitude(jce)
delta_psi = longitude_nutation(jce, x0, x1, x2, x3, x4)
delta_epsilon = obliquity_nutation(jce, x0, x1, x2, x3, x4)
epsilon0 = mean_ecliptic_obliquity(jme)
epsilon = true_ecliptic_obliquity(epsilon0, delta_epsilon)
delta_tau = aberration_correction(R)
lamd = apparent_sun_longitude(Theta, delta_psi, delta_tau)
v0 = mean_sidereal_time(jd, jc)
v = apparent_sidereal_time(v0, delta_psi, epsilon)
alpha = geocentric_sun_right_ascension(lamd, epsilon, beta)
delta = geocentric_sun_declination(lamd, epsilon, beta)
if sst:
out[0, i] = v
out[1, i] = alpha
out[2, i] = delta
continue # depends on [control=['if'], data=[]]
m = sun_mean_longitude(jme)
eot = equation_of_time(m, alpha, delta_psi, epsilon)
H = local_hour_angle(v, lon, alpha)
xi = equatorial_horizontal_parallax(R)
u = uterm(lat)
x = xterm(u, lat, elev)
y = yterm(u, lat, elev)
delta_alpha = parallax_sun_right_ascension(x, xi, H, delta)
delta_prime = topocentric_sun_declination(delta, x, y, xi, delta_alpha, H)
H_prime = topocentric_local_hour_angle(H, delta_alpha)
e0 = topocentric_elevation_angle_without_atmosphere(lat, delta_prime, H_prime)
delta_e = atmospheric_refraction_correction(pressure, temp, e0, atmos_refract)
e = topocentric_elevation_angle(e0, delta_e)
theta = topocentric_zenith_angle(e)
theta0 = topocentric_zenith_angle(e0)
gamma = topocentric_astronomers_azimuth(H_prime, delta_prime, lat)
phi = topocentric_azimuth_angle(gamma)
out[0, i] = theta
out[1, i] = theta0
out[2, i] = e
out[3, i] = e0
out[4, i] = phi
out[5, i] = eot # depends on [control=['for'], data=['i']]
|
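A shape-only sketch of how the loop above is driven; the astronomical helper functions are not reproduced here, so the call itself is left commented, and the location values are illustrative.

import numpy as np

unixtime = np.array([1577836800.0, 1577840400.0])  # seconds since the Unix epoch
# (lat, lon, elev, pressure, temp, delta_t, atmos_refract, sst, esd)
loc_args = (32.2, -110.9, 700.0, 101325.0, 12.0, 67.0, 0.5667, False, False)
out = np.empty((6, unixtime.shape[0]))
# solar_position_loop(unixtime, loc_args, out)
# rows of out: theta, theta0, e, e0, phi, eot
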
def _agent_is_gene(agent, specific_only):
"""Returns whether an agent is for a gene.
Parameters
----------
agent: Agent
The agent to evaluate
specific_only : Optional[bool]
If True, only elementary genes/proteins evaluate as genes and families
will be filtered out. If False, families are also included.
Returns
-------
is_gene: bool
Whether the agent is a gene
"""
if not specific_only:
if not(agent.db_refs.get('HGNC') or \
agent.db_refs.get('UP') or \
agent.db_refs.get('FPLX')):
return False
else:
if not(agent.db_refs.get('HGNC') or \
agent.db_refs.get('UP')):
return False
return True
|
def function[_agent_is_gene, parameter[agent, specific_only]]:
constant[Returns whether an agent is for a gene.
Parameters
----------
agent: Agent
The agent to evaluate
specific_only : Optional[bool]
If True, only elementary genes/proteins evaluate as genes and families
will be filtered out. If False, families are also included.
Returns
-------
is_gene: bool
Whether the agent is a gene
]
if <ast.UnaryOp object at 0x7da1b23444c0> begin[:]
if <ast.UnaryOp object at 0x7da1b23457e0> begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[_agent_is_gene] ( identifier[agent] , identifier[specific_only] ):
literal[string]
keyword[if] keyword[not] identifier[specific_only] :
keyword[if] keyword[not] ( identifier[agent] . identifier[db_refs] . identifier[get] ( literal[string] ) keyword[or] identifier[agent] . identifier[db_refs] . identifier[get] ( literal[string] ) keyword[or] identifier[agent] . identifier[db_refs] . identifier[get] ( literal[string] )):
keyword[return] keyword[False]
keyword[else] :
keyword[if] keyword[not] ( identifier[agent] . identifier[db_refs] . identifier[get] ( literal[string] ) keyword[or] identifier[agent] . identifier[db_refs] . identifier[get] ( literal[string] )):
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def _agent_is_gene(agent, specific_only):
"""Returns whether an agent is for a gene.
Parameters
----------
agent: Agent
The agent to evaluate
specific_only : Optional[bool]
If True, only elementary genes/proteins evaluate as genes and families
will be filtered out. If False, families are also included.
Returns
-------
is_gene: bool
Whether the agent is a gene
"""
if not specific_only:
if not (agent.db_refs.get('HGNC') or agent.db_refs.get('UP') or agent.db_refs.get('FPLX')):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not (agent.db_refs.get('HGNC') or agent.db_refs.get('UP')):
return False # depends on [control=['if'], data=[]]
return True
|
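Assuming _agent_is_gene above is in scope, with a minimal Agent stand-in; the HGNC/FPLX identifiers are illustrative.

class Agent:  # minimal stand-in exposing only db_refs
    def __init__(self, db_refs):
        self.db_refs = db_refs

family = Agent({'FPLX': 'ERK'})
gene = Agent({'HGNC': '6871'})
print(_agent_is_gene(family, specific_only=False))  # True: families count
print(_agent_is_gene(family, specific_only=True))   # False: FPLX is a family
print(_agent_is_gene(gene, specific_only=True))     # True
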
def features_of_type(self, featuretype, limit=None, strand=None,
order_by=None, reverse=False,
completely_within=False):
"""
Returns an iterator of :class:`gffutils.Feature` objects.
Parameters
----------
{_method_doc}
"""
query, args = helpers.make_query(
args=[],
limit=limit,
featuretype=featuretype,
order_by=order_by,
reverse=reverse,
strand=strand,
completely_within=completely_within,
)
for i in self._execute(query, args):
yield self._feature_returner(**i)
|
def function[features_of_type, parameter[self, featuretype, limit, strand, order_by, reverse, completely_within]]:
constant[
Returns an iterator of :class:`gffutils.Feature` objects.
Parameters
----------
{_method_doc}
]
<ast.Tuple object at 0x7da18fe91690> assign[=] call[name[helpers].make_query, parameter[]]
for taget[name[i]] in starred[call[name[self]._execute, parameter[name[query], name[args]]]] begin[:]
<ast.Yield object at 0x7da18fe93c40>
|
keyword[def] identifier[features_of_type] ( identifier[self] , identifier[featuretype] , identifier[limit] = keyword[None] , identifier[strand] = keyword[None] ,
identifier[order_by] = keyword[None] , identifier[reverse] = keyword[False] ,
identifier[completely_within] = keyword[False] ):
literal[string]
identifier[query] , identifier[args] = identifier[helpers] . identifier[make_query] (
identifier[args] =[],
identifier[limit] = identifier[limit] ,
identifier[featuretype] = identifier[featuretype] ,
identifier[order_by] = identifier[order_by] ,
identifier[reverse] = identifier[reverse] ,
identifier[strand] = identifier[strand] ,
identifier[completely_within] = identifier[completely_within] ,
)
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[_execute] ( identifier[query] , identifier[args] ):
keyword[yield] identifier[self] . identifier[_feature_returner] (** identifier[i] )
|
def features_of_type(self, featuretype, limit=None, strand=None, order_by=None, reverse=False, completely_within=False):
"""
Returns an iterator of :class:`gffutils.Feature` objects.
Parameters
----------
{_method_doc}
"""
(query, args) = helpers.make_query(args=[], limit=limit, featuretype=featuretype, order_by=order_by, reverse=reverse, strand=strand, completely_within=completely_within)
for i in self._execute(query, args):
yield self._feature_returner(**i) # depends on [control=['for'], data=['i']]
|
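A hypothetical call against a gffutils FeatureDB, left commented since it needs an annotation database on disk; the filename is made up.

# import gffutils
# db = gffutils.FeatureDB('annotation.db')
# for gene in db.features_of_type('gene', order_by='start'):
#     print(gene.id, gene.start, gene.end)
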
def position_target_global_int_send(self, time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt, vx, vy, vz, afx, afy, afz, yaw, yaw_rate, force_mavlink1=False):
'''
Reports the current commanded vehicle position, velocity, and
acceleration as specified by the autopilot. This
should match the commands sent in
SET_POSITION_TARGET_GLOBAL_INT if the vehicle is being
controlled this way.
time_boot_ms : Timestamp in milliseconds since system boot. The rationale for the timestamp in the setpoint is to allow the system to compensate for the transport delay of the setpoint. This allows the system to compensate processing latency. (uint32_t)
coordinate_frame : Valid options are: MAV_FRAME_GLOBAL_INT = 5, MAV_FRAME_GLOBAL_RELATIVE_ALT_INT = 6, MAV_FRAME_GLOBAL_TERRAIN_ALT_INT = 11 (uint8_t)
type_mask : Bitmask to indicate which dimensions should be ignored by the vehicle: a value of 0b0000000000000000 or 0b0000001000000000 indicates that none of the setpoint dimensions should be ignored. If bit 10 is set the floats afx afy afz should be interpreted as force instead of acceleration. Mapping: bit 1: x, bit 2: y, bit 3: z, bit 4: vx, bit 5: vy, bit 6: vz, bit 7: ax, bit 8: ay, bit 9: az, bit 10: is force setpoint, bit 11: yaw, bit 12: yaw rate (uint16_t)
lat_int : X Position in WGS84 frame in 1e7 * meters (int32_t)
lon_int : Y Position in WGS84 frame in 1e7 * meters (int32_t)
alt : Altitude in meters in AMSL altitude, not WGS84 if absolute or relative, above terrain if GLOBAL_TERRAIN_ALT_INT (float)
vx : X velocity in NED frame in meter / s (float)
vy : Y velocity in NED frame in meter / s (float)
vz : Z velocity in NED frame in meter / s (float)
afx : X acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
afy : Y acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
afz : Z acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
yaw : yaw setpoint in rad (float)
yaw_rate : yaw rate setpoint in rad/s (float)
'''
return self.send(self.position_target_global_int_encode(time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt, vx, vy, vz, afx, afy, afz, yaw, yaw_rate), force_mavlink1=force_mavlink1)
|
def function[position_target_global_int_send, parameter[self, time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt, vx, vy, vz, afx, afy, afz, yaw, yaw_rate, force_mavlink1]]:
constant[
Reports the current commanded vehicle position, velocity, and
acceleration as specified by the autopilot. This
should match the commands sent in
SET_POSITION_TARGET_GLOBAL_INT if the vehicle is being
controlled this way.
time_boot_ms : Timestamp in milliseconds since system boot. The rationale for the timestamp in the setpoint is to allow the system to compensate for the transport delay of the setpoint. This allows the system to compensate processing latency. (uint32_t)
coordinate_frame : Valid options are: MAV_FRAME_GLOBAL_INT = 5, MAV_FRAME_GLOBAL_RELATIVE_ALT_INT = 6, MAV_FRAME_GLOBAL_TERRAIN_ALT_INT = 11 (uint8_t)
type_mask : Bitmask to indicate which dimensions should be ignored by the vehicle: a value of 0b0000000000000000 or 0b0000001000000000 indicates that none of the setpoint dimensions should be ignored. If bit 10 is set the floats afx afy afz should be interpreted as force instead of acceleration. Mapping: bit 1: x, bit 2: y, bit 3: z, bit 4: vx, bit 5: vy, bit 6: vz, bit 7: ax, bit 8: ay, bit 9: az, bit 10: is force setpoint, bit 11: yaw, bit 12: yaw rate (uint16_t)
lat_int : X Position in WGS84 frame in 1e7 * meters (int32_t)
lon_int : Y Position in WGS84 frame in 1e7 * meters (int32_t)
alt : Altitude in meters in AMSL altitude, not WGS84 if absolute or relative, above terrain if GLOBAL_TERRAIN_ALT_INT (float)
vx : X velocity in NED frame in meter / s (float)
vy : Y velocity in NED frame in meter / s (float)
vz : Z velocity in NED frame in meter / s (float)
afx : X acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
afy : Y acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
afz : Z acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
yaw : yaw setpoint in rad (float)
yaw_rate : yaw rate setpoint in rad/s (float)
]
return[call[name[self].send, parameter[call[name[self].position_target_global_int_encode, parameter[name[time_boot_ms], name[coordinate_frame], name[type_mask], name[lat_int], name[lon_int], name[alt], name[vx], name[vy], name[vz], name[afx], name[afy], name[afz], name[yaw], name[yaw_rate]]]]]]
|
keyword[def] identifier[position_target_global_int_send] ( identifier[self] , identifier[time_boot_ms] , identifier[coordinate_frame] , identifier[type_mask] , identifier[lat_int] , identifier[lon_int] , identifier[alt] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[afx] , identifier[afy] , identifier[afz] , identifier[yaw] , identifier[yaw_rate] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[position_target_global_int_encode] ( identifier[time_boot_ms] , identifier[coordinate_frame] , identifier[type_mask] , identifier[lat_int] , identifier[lon_int] , identifier[alt] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[afx] , identifier[afy] , identifier[afz] , identifier[yaw] , identifier[yaw_rate] ), identifier[force_mavlink1] = identifier[force_mavlink1] )
|
def position_target_global_int_send(self, time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt, vx, vy, vz, afx, afy, afz, yaw, yaw_rate, force_mavlink1=False):
"""
Reports the current commanded vehicle position, velocity, and
acceleration as specified by the autopilot. This
should match the commands sent in
SET_POSITION_TARGET_GLOBAL_INT if the vehicle is being
controlled this way.
time_boot_ms : Timestamp in milliseconds since system boot. The rationale for the timestamp in the setpoint is to allow the system to compensate for the transport delay of the setpoint. This allows the system to compensate processing latency. (uint32_t)
coordinate_frame : Valid options are: MAV_FRAME_GLOBAL_INT = 5, MAV_FRAME_GLOBAL_RELATIVE_ALT_INT = 6, MAV_FRAME_GLOBAL_TERRAIN_ALT_INT = 11 (uint8_t)
type_mask : Bitmask to indicate which dimensions should be ignored by the vehicle: a value of 0b0000000000000000 or 0b0000001000000000 indicates that none of the setpoint dimensions should be ignored. If bit 10 is set the floats afx afy afz should be interpreted as force instead of acceleration. Mapping: bit 1: x, bit 2: y, bit 3: z, bit 4: vx, bit 5: vy, bit 6: vz, bit 7: ax, bit 8: ay, bit 9: az, bit 10: is force setpoint, bit 11: yaw, bit 12: yaw rate (uint16_t)
lat_int : X Position in WGS84 frame in 1e7 * meters (int32_t)
lon_int : Y Position in WGS84 frame in 1e7 * meters (int32_t)
alt : Altitude in meters in AMSL altitude, not WGS84 if absolute or relative, above terrain if GLOBAL_TERRAIN_ALT_INT (float)
vx : X velocity in NED frame in meter / s (float)
vy : Y velocity in NED frame in meter / s (float)
vz : Z velocity in NED frame in meter / s (float)
afx : X acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
afy : Y acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
afz : Z acceleration or force (if bit 10 of type_mask is set) in NED frame in meter / s^2 or N (float)
yaw : yaw setpoint in rad (float)
yaw_rate : yaw rate setpoint in rad/s (float)
"""
return self.send(self.position_target_global_int_encode(time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt, vx, vy, vz, afx, afy, afz, yaw, yaw_rate), force_mavlink1=force_mavlink1)
|
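The 1e7 scaling described in the docstring, plus a hypothetical pymavlink send left commented since it needs a live MAVLink connection; coordinates and mask are illustrative.

lat, lon = 47.397742, 8.545594
lat_int = int(round(lat * 1e7))  # WGS84 degrees -> 1e7-scaled int
lon_int = int(round(lon * 1e7))

# from pymavlink import mavutil
# master = mavutil.mavlink_connection('udpin:0.0.0.0:14550')
# master.mav.position_target_global_int_send(
#     0, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
#     0b0000111111111000,  # type_mask: ignore velocity/accel/yaw, use position
#     lat_int, lon_int, 10.0, 0, 0, 0, 0, 0, 0, 0, 0)
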
def _GetVisitSource(self, visit_identifier, cache, database):
"""Retrieves a visit source type based on the identifier.
Args:
visit_identifier (str): identifier from the visits table for the
particular record.
cache (SQLiteCache): cache which contains cached results from querying
the visit_source table.
database (SQLiteDatabase): database.
Returns:
int: visit source type or None if no visit source type was found for
the identifier.
"""
sync_cache_results = cache.GetResults('sync')
if not sync_cache_results:
result_set = database.Query(self._SYNC_CACHE_QUERY)
cache.CacheQueryResults(result_set, 'sync', 'id', ('source',))
sync_cache_results = cache.GetResults('sync')
if sync_cache_results and visit_identifier:
results = sync_cache_results.get(visit_identifier, None)
if results:
return results[0]
return None
|
def function[_GetVisitSource, parameter[self, visit_identifier, cache, database]]:
constant[Retrieves a visit source type based on the identifier.
Args:
visit_identifier (str): identifier from the visits table for the
particular record.
cache (SQLiteCache): cache which contains cached results from querying
the visit_source table.
database (SQLiteDatabase): database.
Returns:
int: visit source type or None if no visit source type was found for
the identifier.
]
variable[sync_cache_results] assign[=] call[name[cache].GetResults, parameter[constant[sync]]]
if <ast.UnaryOp object at 0x7da207f02800> begin[:]
variable[result_set] assign[=] call[name[database].Query, parameter[name[self]._SYNC_CACHE_QUERY]]
call[name[cache].CacheQueryResults, parameter[name[result_set], constant[sync], constant[id], tuple[[<ast.Constant object at 0x7da204347970>]]]]
variable[sync_cache_results] assign[=] call[name[cache].GetResults, parameter[constant[sync]]]
if <ast.BoolOp object at 0x7da204344a30> begin[:]
variable[results] assign[=] call[name[sync_cache_results].get, parameter[name[visit_identifier], constant[None]]]
if name[results] begin[:]
return[call[name[results]][constant[0]]]
return[constant[None]]
|
keyword[def] identifier[_GetVisitSource] ( identifier[self] , identifier[visit_identifier] , identifier[cache] , identifier[database] ):
literal[string]
identifier[sync_cache_results] = identifier[cache] . identifier[GetResults] ( literal[string] )
keyword[if] keyword[not] identifier[sync_cache_results] :
identifier[result_set] = identifier[database] . identifier[Query] ( identifier[self] . identifier[_SYNC_CACHE_QUERY] )
identifier[cache] . identifier[CacheQueryResults] ( identifier[result_set] , literal[string] , literal[string] ,( literal[string] ,))
identifier[sync_cache_results] = identifier[cache] . identifier[GetResults] ( literal[string] )
keyword[if] identifier[sync_cache_results] keyword[and] identifier[visit_identifier] :
identifier[results] = identifier[sync_cache_results] . identifier[get] ( identifier[visit_identifier] , keyword[None] )
keyword[if] identifier[results] :
keyword[return] identifier[results] [ literal[int] ]
keyword[return] keyword[None]
|
def _GetVisitSource(self, visit_identifier, cache, database):
"""Retrieves a visit source type based on the identifier.
Args:
visit_identifier (str): identifier from the visits table for the
particular record.
cache (SQLiteCache): cache which contains cached results from querying
the visit_source table.
database (SQLiteDatabase): database.
Returns:
int: visit source type or None if no visit source type was found for
the identifier.
"""
sync_cache_results = cache.GetResults('sync')
if not sync_cache_results:
result_set = database.Query(self._SYNC_CACHE_QUERY)
cache.CacheQueryResults(result_set, 'sync', 'id', ('source',))
sync_cache_results = cache.GetResults('sync') # depends on [control=['if'], data=[]]
if sync_cache_results and visit_identifier:
results = sync_cache_results.get(visit_identifier, None)
if results:
return results[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return None
|
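A self-contained sketch of the cache-on-first-miss pattern above, with a hypothetical stub cache; the row data is made up.

class StubCache:  # minimal stand-in for SQLiteCache
    def __init__(self):
        self._results = {}
    def GetResults(self, name):
        return self._results.get(name)
    def CacheQueryResults(self, rows, name, key, columns):
        self._results[name] = {
            row[key]: tuple(row[c] for c in columns) for row in rows}

cache = StubCache()
if not cache.GetResults('sync'):
    cache.CacheQueryResults([{'id': '42', 'source': 3}], 'sync', 'id', ('source',))
results = cache.GetResults('sync').get('42')
print(results[0] if results else None)  # 3
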
def request_vms_info(self, payload):
"""Get the VMs from the database and send the info to the agent."""
# This request is received from an agent when it runs for the first
# time and uplink is detected.
agent = payload.get('agent')
LOG.debug('request_vms_info: Getting VMs info for %s', agent)
req = dict(host=payload.get('agent'))
instances = self.get_vms_for_this_req(**req)
vm_info = []
for vm in instances:
vm_info.append(dict(status=vm.status,
vm_mac=vm.mac,
segmentation_id=vm.segmentation_id,
host=vm.host,
port_uuid=vm.port_id,
net_uuid=vm.network_id,
oui=dict(ip_addr=vm.ip,
vm_name=vm.name,
vm_uuid=vm.instance_id,
gw_mac=vm.gw_mac,
fwd_mod=vm.fwd_mod,
oui_id='cisco')))
try:
self.neutron_event.send_vm_info(agent, str(vm_info))
except (rpc.MessagingTimeout, rpc.RPCException, rpc.RemoteError):
LOG.error('Failed to send VM info to agent.')
|
def function[request_vms_info, parameter[self, payload]]:
constant[Get the VMs from the database and send the info to the agent.]
variable[agent] assign[=] call[name[payload].get, parameter[constant[agent]]]
call[name[LOG].debug, parameter[constant[request_vms_info: Getting VMs info for %s], name[agent]]]
variable[req] assign[=] call[name[dict], parameter[]]
variable[instances] assign[=] call[name[self].get_vms_for_this_req, parameter[]]
variable[vm_info] assign[=] list[[]]
for taget[name[vm]] in starred[name[instances]] begin[:]
call[name[vm_info].append, parameter[call[name[dict], parameter[]]]]
<ast.Try object at 0x7da1b1b16c80>
|
keyword[def] identifier[request_vms_info] ( identifier[self] , identifier[payload] ):
literal[string]
identifier[agent] = identifier[payload] . identifier[get] ( literal[string] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[agent] )
identifier[req] = identifier[dict] ( identifier[host] = identifier[payload] . identifier[get] ( literal[string] ))
identifier[instances] = identifier[self] . identifier[get_vms_for_this_req] (** identifier[req] )
identifier[vm_info] =[]
keyword[for] identifier[vm] keyword[in] identifier[instances] :
identifier[vm_info] . identifier[append] ( identifier[dict] ( identifier[status] = identifier[vm] . identifier[status] ,
identifier[vm_mac] = identifier[vm] . identifier[mac] ,
identifier[segmentation_id] = identifier[vm] . identifier[segmentation_id] ,
identifier[host] = identifier[vm] . identifier[host] ,
identifier[port_uuid] = identifier[vm] . identifier[port_id] ,
identifier[net_uuid] = identifier[vm] . identifier[network_id] ,
identifier[oui] = identifier[dict] ( identifier[ip_addr] = identifier[vm] . identifier[ip] ,
identifier[vm_name] = identifier[vm] . identifier[name] ,
identifier[vm_uuid] = identifier[vm] . identifier[instance_id] ,
identifier[gw_mac] = identifier[vm] . identifier[gw_mac] ,
identifier[fwd_mod] = identifier[vm] . identifier[fwd_mod] ,
identifier[oui_id] = literal[string] )))
keyword[try] :
identifier[self] . identifier[neutron_event] . identifier[send_vm_info] ( identifier[agent] , identifier[str] ( identifier[vm_info] ))
keyword[except] ( identifier[rpc] . identifier[MessagingTimeout] , identifier[rpc] . identifier[RPCException] , identifier[rpc] . identifier[RemoteError] ):
identifier[LOG] . identifier[error] ( literal[string] )
|
def request_vms_info(self, payload):
"""Get the VMs from the database and send the info to the agent."""
# This request is received from an agent when it runs for the first
# time and uplink is detected.
agent = payload.get('agent')
LOG.debug('request_vms_info: Getting VMs info for %s', agent)
req = dict(host=payload.get('agent'))
instances = self.get_vms_for_this_req(**req)
vm_info = []
for vm in instances:
vm_info.append(dict(status=vm.status, vm_mac=vm.mac, segmentation_id=vm.segmentation_id, host=vm.host, port_uuid=vm.port_id, net_uuid=vm.network_id, oui=dict(ip_addr=vm.ip, vm_name=vm.name, vm_uuid=vm.instance_id, gw_mac=vm.gw_mac, fwd_mod=vm.fwd_mod, oui_id='cisco'))) # depends on [control=['for'], data=['vm']]
try:
self.neutron_event.send_vm_info(agent, str(vm_info)) # depends on [control=['try'], data=[]]
except (rpc.MessagingTimeout, rpc.RPCException, rpc.RemoteError):
LOG.error('Failed to send VM info to agent.') # depends on [control=['except'], data=[]]
|
def update_metadata(self, **kwargs):
"""
::
POST /:login/machines/:id/metadata
:Returns: current metadata
:rtype: :py:class:`dict`
    Send a metadata dict update for the machine (following dict.update()
semantics) using the keys and values passed in the keyword arguments.
The method also refreshes the locally cached copy of the metadata kept
in the :py:attr:`metadata` attribute and returns it.
"""
j, _ = self.datacenter.request('POST', self.path + '/metadata',
data=kwargs)
self.metadata = j
return j
|
def function[update_metadata, parameter[self]]:
constant[
::
POST /:login/machines/:id/metadata
:Returns: current metadata
:rtype: :py:class:`dict`
    Send a metadata dict update for the machine (following dict.update()
semantics) using the keys and values passed in the keyword arguments.
The method also refreshes the locally cached copy of the metadata kept
in the :py:attr:`metadata` attribute and returns it.
]
<ast.Tuple object at 0x7da20e9b31f0> assign[=] call[name[self].datacenter.request, parameter[constant[POST], binary_operation[name[self].path + constant[/metadata]]]]
name[self].metadata assign[=] name[j]
return[name[j]]
|
keyword[def] identifier[update_metadata] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[j] , identifier[_] = identifier[self] . identifier[datacenter] . identifier[request] ( literal[string] , identifier[self] . identifier[path] + literal[string] ,
identifier[data] = identifier[kwargs] )
identifier[self] . identifier[metadata] = identifier[j]
keyword[return] identifier[j]
|
def update_metadata(self, **kwargs):
"""
::
POST /:login/machines/:id/metadata
:Returns: current metadata
:rtype: :py:class:`dict`
    Send a metadata dict update for the machine (following dict.update()
semantics) using the keys and values passed in the keyword arguments.
The method also refreshes the locally cached copy of the metadata kept
in the :py:attr:`metadata` attribute and returns it.
"""
(j, _) = self.datacenter.request('POST', self.path + '/metadata', data=kwargs)
self.metadata = j
return j
|
def visual_accelerators(self, value):
"""
Setter for **self.__visual_accelerators** attribute.
:param value: Attribute value.
:type value: tuple or list
"""
if value is not None:
assert type(value) in (tuple, list), "'{0}' attribute: '{1}' type is not 'tuple' or 'list'!".format(
"visual_accelerators", value)
self.__visual_accelerators = value
|
def function[visual_accelerators, parameter[self, value]]:
constant[
Setter for **self.__visual_accelerators** attribute.
:param value: Attribute value.
:type value: tuple or list
]
if compare[name[value] is_not constant[None]] begin[:]
assert[compare[call[name[type], parameter[name[value]]] in tuple[[<ast.Name object at 0x7da1b09bd2d0>, <ast.Name object at 0x7da1b09bcee0>]]]]
name[self].__visual_accelerators assign[=] name[value]
|
keyword[def] identifier[visual_accelerators] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[type] ( identifier[value] ) keyword[in] ( identifier[tuple] , identifier[list] ), literal[string] . identifier[format] (
literal[string] , identifier[value] )
identifier[self] . identifier[__visual_accelerators] = identifier[value]
|
def visual_accelerators(self, value):
"""
Setter for **self.__visual_accelerators** attribute.
:param value: Attribute value.
:type value: tuple or list
"""
if value is not None:
assert type(value) in (tuple, list), "'{0}' attribute: '{1}' type is not 'tuple' or 'list'!".format('visual_accelerators', value) # depends on [control=['if'], data=['value']]
self.__visual_accelerators = value
|
def deploy(verbose, app):
"""Deploy app using Heroku to MTurk."""
# Load psiTurk configuration.
config = PsiturkConfig()
config.load_config()
# Set the mode.
config.set("Experiment Configuration", "mode", "deploy")
config.set("Server Parameters", "logfile", "-")
# Ensure that psiTurk is not in sandbox mode.
config.set("Shell Parameters", "launch_in_sandbox_mode", "false")
# Do shared setup.
deploy_sandbox_shared_setup(verbose=verbose, app=app)
|
def function[deploy, parameter[verbose, app]]:
constant[Deploy app using Heroku to MTurk.]
variable[config] assign[=] call[name[PsiturkConfig], parameter[]]
call[name[config].load_config, parameter[]]
call[name[config].set, parameter[constant[Experiment Configuration], constant[mode], constant[deploy]]]
call[name[config].set, parameter[constant[Server Parameters], constant[logfile], constant[-]]]
call[name[config].set, parameter[constant[Shell Parameters], constant[launch_in_sandbox_mode], constant[false]]]
call[name[deploy_sandbox_shared_setup], parameter[]]
|
keyword[def] identifier[deploy] ( identifier[verbose] , identifier[app] ):
literal[string]
identifier[config] = identifier[PsiturkConfig] ()
identifier[config] . identifier[load_config] ()
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
identifier[deploy_sandbox_shared_setup] ( identifier[verbose] = identifier[verbose] , identifier[app] = identifier[app] )
|
def deploy(verbose, app):
"""Deploy app using Heroku to MTurk."""
# Load psiTurk configuration.
config = PsiturkConfig()
config.load_config()
# Set the mode.
config.set('Experiment Configuration', 'mode', 'deploy')
config.set('Server Parameters', 'logfile', '-')
# Ensure that psiTurk is not in sandbox mode.
config.set('Shell Parameters', 'launch_in_sandbox_mode', 'false')
# Do shared setup.
deploy_sandbox_shared_setup(verbose=verbose, app=app)
|
def remove(backend, variable):
'''remove a variable from the config, if found.
'''
print('[remove]')
settings = read_client_secrets()
    # If the variable doesn't already begin with the SREGISTRY_<CLIENT>_ prefix, add it
prefixed = variable
prefix = 'SREGISTRY_%s_' %backend.upper()
if not variable.startswith(prefix):
prefixed = '%s%s' %(prefix, variable)
# All must be uppercase
variable = variable.upper()
bot.info(variable)
# Does the setting already exist?
if backend in settings:
if variable in settings[backend]:
del settings[backend][variable]
if prefixed in settings[backend]:
del settings[backend][prefixed]
update_secrets(settings)
|
def function[remove, parameter[backend, variable]]:
constant[remove a variable from the config, if found.
]
call[name[print], parameter[constant[[remove]]]]
variable[settings] assign[=] call[name[read_client_secrets], parameter[]]
variable[prefixed] assign[=] name[variable]
variable[prefix] assign[=] binary_operation[constant[SREGISTRY_%s_] <ast.Mod object at 0x7da2590d6920> call[name[backend].upper, parameter[]]]
if <ast.UnaryOp object at 0x7da1b02b88b0> begin[:]
variable[prefixed] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b02b85e0>, <ast.Name object at 0x7da1b02bbbe0>]]]
variable[variable] assign[=] call[name[variable].upper, parameter[]]
call[name[bot].info, parameter[name[variable]]]
if compare[name[backend] in name[settings]] begin[:]
if compare[name[variable] in call[name[settings]][name[backend]]] begin[:]
<ast.Delete object at 0x7da1b02bb7c0>
if compare[name[prefixed] in call[name[settings]][name[backend]]] begin[:]
<ast.Delete object at 0x7da1b02bbd00>
call[name[update_secrets], parameter[name[settings]]]
|
keyword[def] identifier[remove] ( identifier[backend] , identifier[variable] ):
literal[string]
identifier[print] ( literal[string] )
identifier[settings] = identifier[read_client_secrets] ()
identifier[prefixed] = identifier[variable]
identifier[prefix] = literal[string] % identifier[backend] . identifier[upper] ()
keyword[if] keyword[not] identifier[variable] . identifier[startswith] ( identifier[prefix] ):
identifier[prefixed] = literal[string] %( identifier[prefix] , identifier[variable] )
identifier[variable] = identifier[variable] . identifier[upper] ()
identifier[bot] . identifier[info] ( identifier[variable] )
keyword[if] identifier[backend] keyword[in] identifier[settings] :
keyword[if] identifier[variable] keyword[in] identifier[settings] [ identifier[backend] ]:
keyword[del] identifier[settings] [ identifier[backend] ][ identifier[variable] ]
keyword[if] identifier[prefixed] keyword[in] identifier[settings] [ identifier[backend] ]:
keyword[del] identifier[settings] [ identifier[backend] ][ identifier[prefixed] ]
identifier[update_secrets] ( identifier[settings] )
|
def remove(backend, variable):
"""remove a variable from the config, if found.
"""
print('[remove]')
settings = read_client_secrets()
    # If the variable doesn't already begin with the SREGISTRY_<CLIENT>_ prefix, add it
prefixed = variable
prefix = 'SREGISTRY_%s_' % backend.upper()
if not variable.startswith(prefix):
prefixed = '%s%s' % (prefix, variable) # depends on [control=['if'], data=[]]
# All must be uppercase
variable = variable.upper()
bot.info(variable)
# Does the setting already exist?
if backend in settings:
if variable in settings[backend]:
del settings[backend][variable] # depends on [control=['if'], data=['variable']]
if prefixed in settings[backend]:
del settings[backend][prefixed] # depends on [control=['if'], data=['prefixed']]
update_secrets(settings) # depends on [control=['if'], data=['backend', 'settings']]
|
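The prefixing rule in remove() is easy to trace in isolation. A small sketch of how a bare variable name is expanded before deletion (values are illustrative):

# Illustration of the SREGISTRY_<CLIENT>_ prefix expansion used by remove().
backend, variable = 'google', 'token'              # illustrative values
prefix = 'SREGISTRY_%s_' % backend.upper()         # -> 'SREGISTRY_GOOGLE_'
prefixed = variable
if not variable.startswith(prefix):
    prefixed = '%s%s' % (prefix, variable)         # -> 'SREGISTRY_GOOGLE_token'
variable = variable.upper()                        # -> 'TOKEN'
# remove() then deletes both settings[backend]['TOKEN'] and
# settings[backend]['SREGISTRY_GOOGLE_token'] when present.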
def is_stationarity(self, tolerance=0.2, sample=None):
"""
        Checks whether the given Markov chain is stationary and whether the steady state
        probability values for the states are consistent.
        Parameters:
        -----------
        tolerance: float
            represents the allowed difference between the actual steady state value and the computed value
        sample: [State(i,j)]
            represents the list of states which the Markov chain has sampled
        Return Type:
        ------------
        Boolean
            True, if the Markov chain converges to the steady state distribution within the tolerance
            False, if the Markov chain does not converge to the steady state distribution within the tolerance
Examples:
---------
>>> from pgmpy.models.MarkovChain import MarkovChain
>>> from pgmpy.factors.discrete import State
>>> model = MarkovChain()
>>> model.add_variables_from(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.is_stationarity()
True
"""
keys = self.transition_models.keys()
return_val = True
for k in keys:
# convert dict to numpy matrix
transition_mat = np.array([np.array(list(self.transition_models[k][i].values()))
for i in self.transition_models[k].keys()], dtype=np.float)
S, U = eig(transition_mat.T)
stationary = np.array(U[:, np.where(np.abs(S - 1.) < 1e-8)[0][0]].flat)
stationary = (stationary / np.sum(stationary)).real
probabilites = []
window_size = 10000 if sample is None else len(sample)
for i in range(0, transition_mat.shape[0]):
probabilites.extend(self.prob_from_sample([State(k, i)], window_size=window_size))
if any(np.abs(i) > tolerance for i in np.subtract(probabilites, stationary)):
return_val = return_val and False
else:
return_val = return_val and True
return return_val
|
def function[is_stationarity, parameter[self, tolerance, sample]]:
constant[
Checks whether the given Markov chain is stationary and whether the steady state
probability values for the states are consistent.
Parameters:
-----------
tolerance: float
represents the allowed difference between the actual steady state value and the computed value
sample: [State(i,j)]
represents the list of states which the Markov chain has sampled
Return Type:
------------
Boolean
True, if the Markov chain converges to the steady state distribution within the tolerance
False, if the Markov chain does not converge to the steady state distribution within the tolerance
Examples:
---------
>>> from pgmpy.models.MarkovChain import MarkovChain
>>> from pgmpy.factors.discrete import State
>>> model = MarkovChain()
>>> model.add_variables_from(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.is_stationarity()
True
]
variable[keys] assign[=] call[name[self].transition_models.keys, parameter[]]
variable[return_val] assign[=] constant[True]
for taget[name[k]] in starred[name[keys]] begin[:]
variable[transition_mat] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c6ab2b0>]]
<ast.Tuple object at 0x7da20c6abdc0> assign[=] call[name[eig], parameter[name[transition_mat].T]]
variable[stationary] assign[=] call[name[np].array, parameter[call[name[U]][tuple[[<ast.Slice object at 0x7da20c6aafe0>, <ast.Subscript object at 0x7da20c6ab130>]]].flat]]
variable[stationary] assign[=] binary_operation[name[stationary] / call[name[np].sum, parameter[name[stationary]]]].real
variable[probabilites] assign[=] list[[]]
variable[window_size] assign[=] <ast.IfExp object at 0x7da20c6aa5f0>
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[transition_mat].shape][constant[0]]]]] begin[:]
call[name[probabilites].extend, parameter[call[name[self].prob_from_sample, parameter[list[[<ast.Call object at 0x7da1b2344130>]]]]]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b2346a70>]] begin[:]
variable[return_val] assign[=] <ast.BoolOp object at 0x7da1b2344730>
return[name[return_val]]
|
keyword[def] identifier[is_stationarity] ( identifier[self] , identifier[tolerance] = literal[int] , identifier[sample] = keyword[None] ):
literal[string]
identifier[keys] = identifier[self] . identifier[transition_models] . identifier[keys] ()
identifier[return_val] = keyword[True]
keyword[for] identifier[k] keyword[in] identifier[keys] :
identifier[transition_mat] = identifier[np] . identifier[array] ([ identifier[np] . identifier[array] ( identifier[list] ( identifier[self] . identifier[transition_models] [ identifier[k] ][ identifier[i] ]. identifier[values] ()))
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[transition_models] [ identifier[k] ]. identifier[keys] ()], identifier[dtype] = identifier[np] . identifier[float] )
identifier[S] , identifier[U] = identifier[eig] ( identifier[transition_mat] . identifier[T] )
identifier[stationary] = identifier[np] . identifier[array] ( identifier[U] [:, identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[S] - literal[int] )< literal[int] )[ literal[int] ][ literal[int] ]]. identifier[flat] )
identifier[stationary] =( identifier[stationary] / identifier[np] . identifier[sum] ( identifier[stationary] )). identifier[real]
identifier[probabilites] =[]
identifier[window_size] = literal[int] keyword[if] identifier[sample] keyword[is] keyword[None] keyword[else] identifier[len] ( identifier[sample] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[transition_mat] . identifier[shape] [ literal[int] ]):
identifier[probabilites] . identifier[extend] ( identifier[self] . identifier[prob_from_sample] ([ identifier[State] ( identifier[k] , identifier[i] )], identifier[window_size] = identifier[window_size] ))
keyword[if] identifier[any] ( identifier[np] . identifier[abs] ( identifier[i] )> identifier[tolerance] keyword[for] identifier[i] keyword[in] identifier[np] . identifier[subtract] ( identifier[probabilites] , identifier[stationary] )):
identifier[return_val] = identifier[return_val] keyword[and] keyword[False]
keyword[else] :
identifier[return_val] = identifier[return_val] keyword[and] keyword[True]
keyword[return] identifier[return_val]
|
def is_stationarity(self, tolerance=0.2, sample=None):
"""
        Checks whether the given Markov chain is stationary and whether the steady state
        probability values for the states are consistent.
        Parameters:
        -----------
        tolerance: float
            represents the allowed difference between the actual steady state value and the computed value
        sample: [State(i,j)]
            represents the list of states which the Markov chain has sampled
        Return Type:
        ------------
        Boolean
            True, if the Markov chain converges to the steady state distribution within the tolerance
            False, if the Markov chain does not converge to the steady state distribution within the tolerance
Examples:
---------
>>> from pgmpy.models.MarkovChain import MarkovChain
>>> from pgmpy.factors.discrete import State
>>> model = MarkovChain()
>>> model.add_variables_from(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.is_stationarity()
True
"""
keys = self.transition_models.keys()
return_val = True
for k in keys:
# convert dict to numpy matrix
transition_mat = np.array([np.array(list(self.transition_models[k][i].values())) for i in self.transition_models[k].keys()], dtype=np.float)
(S, U) = eig(transition_mat.T)
stationary = np.array(U[:, np.where(np.abs(S - 1.0) < 1e-08)[0][0]].flat)
stationary = (stationary / np.sum(stationary)).real
probabilites = []
window_size = 10000 if sample is None else len(sample)
for i in range(0, transition_mat.shape[0]):
probabilites.extend(self.prob_from_sample([State(k, i)], window_size=window_size)) # depends on [control=['for'], data=['i']]
if any((np.abs(i) > tolerance for i in np.subtract(probabilites, stationary))):
return_val = return_val and False # depends on [control=['if'], data=[]]
else:
return_val = return_val and True # depends on [control=['for'], data=['k']]
return return_val
|
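The eigenvector step in is_stationarity() is the part worth isolating: the stationary distribution is the eigenvalue-1 left eigenvector of the transition matrix, obtained by decomposing its transpose. A self-contained NumPy sketch using the 'intel' transition model from the docstring:

import numpy as np
from numpy.linalg import eig

# Row-stochastic transition matrix for 'intel' from the docstring above.
T = np.array([[0.2, 0.4, 0.4],
              [0.0, 0.5, 0.5],
              [0.3, 0.3, 0.4]])
S, U = eig(T.T)                                    # eigen-decompose the transpose
stationary = U[:, np.isclose(S, 1.0)][:, 0].real   # eigenvalue-1 eigenvector
stationary /= stationary.sum()                     # normalise to a distribution
assert np.allclose(stationary @ T, stationary)     # pi @ T == pi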
async def async_open(self) -> None:
"""Opens connection to the LifeSOS ethernet interface."""
await self._loop.create_connection(
lambda: self,
self._host,
self._port)
|
<ast.AsyncFunctionDef object at 0x7da2044c0250>
|
keyword[async] keyword[def] identifier[async_open] ( identifier[self] )-> keyword[None] :
literal[string]
keyword[await] identifier[self] . identifier[_loop] . identifier[create_connection] (
keyword[lambda] : identifier[self] ,
identifier[self] . identifier[_host] ,
identifier[self] . identifier[_port] )
|
async def async_open(self) -> None:
"""Opens connection to the LifeSOS ethernet interface."""
await self._loop.create_connection(lambda : self, self._host, self._port)
|
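async_open() only wires the protocol object into loop.create_connection(); a running event loop must drive it. A hedged sketch, where `LifeSOSProtocol` is a placeholder name for whichever class defines the coroutine above (the constructor arguments are assumptions):

import asyncio

async def main():
    # LifeSOSProtocol is a placeholder for the class defining async_open().
    protocol = LifeSOSProtocol(host='192.168.1.100', port=1680)  # assumed ctor
    await protocol.async_open()  # create_connection hands the transport to `self`

asyncio.run(main())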
def attributs(self):
"""
The user attributes, defined as the fields on the :attr:`user` object.
        :return: a :class:`dict` with the :attr:`user` object fields.
            If the user does not exist, the returned :class:`dict` is empty.
:rtype: dict
"""
if self.user:
attr = {}
# _meta.get_fields() is from the new documented _meta interface in django 1.8
try:
field_names = [
field.attname for field in self.user._meta.get_fields()
if hasattr(field, "attname")
]
# backward compatibility with django 1.7
except AttributeError: # pragma: no cover (only used by django 1.7)
field_names = self.user._meta.get_all_field_names()
for name in field_names:
attr[name] = getattr(self.user, name)
# unfold user_permissions many to many relation
if 'user_permissions' in attr:
attr['user_permissions'] = [
(
u"%s.%s" % (
perm.content_type.model_class().__module__,
perm.content_type.model_class().__name__
),
perm.codename
) for perm in attr['user_permissions'].filter()
]
# unfold group many to many relation
if 'groups' in attr:
attr['groups'] = [group.name for group in attr['groups'].filter()]
return attr
else:
return {}
|
def function[attributs, parameter[self]]:
constant[
The user attributes, defined as the fields on the :attr:`user` object.
:return: a :class:`dict` with the :attr:`user` object fields.
If the user does not exist, the returned :class:`dict` is empty.
:rtype: dict
]
if name[self].user begin[:]
variable[attr] assign[=] dictionary[[], []]
<ast.Try object at 0x7da1b0de4e50>
for taget[name[name]] in starred[name[field_names]] begin[:]
call[name[attr]][name[name]] assign[=] call[name[getattr], parameter[name[self].user, name[name]]]
if compare[constant[user_permissions] in name[attr]] begin[:]
call[name[attr]][constant[user_permissions]] assign[=] <ast.ListComp object at 0x7da1b0de5e10>
if compare[constant[groups] in name[attr]] begin[:]
call[name[attr]][constant[groups]] assign[=] <ast.ListComp object at 0x7da1b0de5120>
return[name[attr]]
|
keyword[def] identifier[attributs] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[user] :
identifier[attr] ={}
keyword[try] :
identifier[field_names] =[
identifier[field] . identifier[attname] keyword[for] identifier[field] keyword[in] identifier[self] . identifier[user] . identifier[_meta] . identifier[get_fields] ()
keyword[if] identifier[hasattr] ( identifier[field] , literal[string] )
]
keyword[except] identifier[AttributeError] :
identifier[field_names] = identifier[self] . identifier[user] . identifier[_meta] . identifier[get_all_field_names] ()
keyword[for] identifier[name] keyword[in] identifier[field_names] :
identifier[attr] [ identifier[name] ]= identifier[getattr] ( identifier[self] . identifier[user] , identifier[name] )
keyword[if] literal[string] keyword[in] identifier[attr] :
identifier[attr] [ literal[string] ]=[
(
literal[string] %(
identifier[perm] . identifier[content_type] . identifier[model_class] (). identifier[__module__] ,
identifier[perm] . identifier[content_type] . identifier[model_class] (). identifier[__name__]
),
identifier[perm] . identifier[codename]
) keyword[for] identifier[perm] keyword[in] identifier[attr] [ literal[string] ]. identifier[filter] ()
]
keyword[if] literal[string] keyword[in] identifier[attr] :
identifier[attr] [ literal[string] ]=[ identifier[group] . identifier[name] keyword[for] identifier[group] keyword[in] identifier[attr] [ literal[string] ]. identifier[filter] ()]
keyword[return] identifier[attr]
keyword[else] :
keyword[return] {}
|
def attributs(self):
"""
The user attributes, defined as the fields on the :attr:`user` object.
    :return: a :class:`dict` with the :attr:`user` object fields.
        If the user does not exist, the returned :class:`dict` is empty.
:rtype: dict
"""
if self.user:
attr = {}
# _meta.get_fields() is from the new documented _meta interface in django 1.8
try:
field_names = [field.attname for field in self.user._meta.get_fields() if hasattr(field, 'attname')] # depends on [control=['try'], data=[]]
# backward compatibility with django 1.7
except AttributeError: # pragma: no cover (only used by django 1.7)
field_names = self.user._meta.get_all_field_names() # depends on [control=['except'], data=[]]
for name in field_names:
attr[name] = getattr(self.user, name) # depends on [control=['for'], data=['name']]
# unfold user_permissions many to many relation
if 'user_permissions' in attr:
attr['user_permissions'] = [(u'%s.%s' % (perm.content_type.model_class().__module__, perm.content_type.model_class().__name__), perm.codename) for perm in attr['user_permissions'].filter()] # depends on [control=['if'], data=['attr']]
# unfold group many to many relation
if 'groups' in attr:
attr['groups'] = [group.name for group in attr['groups'].filter()] # depends on [control=['if'], data=['attr']]
return attr # depends on [control=['if'], data=[]]
else:
return {}
|
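The unfolded attributes are plain Python values: permissions become ('module.ClassName', codename) pairs and groups become name strings. A small consumer sketch; `service_user` is a hypothetical object exposing the property above:

attrs = service_user.attributs
for model_path, codename in attrs.get('user_permissions', []):
    # e.g. django.contrib.auth.models.User -> change_user
    print('%s -> %s' % (model_path, codename))
for group_name in attrs.get('groups', []):
    print('group: %s' % group_name)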
def readline(self):
"""Read a chunk of the output"""
_LOGGER.info("reading line")
line = self.read(self.line_length)
if len(line) < self.line_length:
_LOGGER.info("all lines read")
return line
|
def function[readline, parameter[self]]:
constant[Read a chunk of the output]
call[name[_LOGGER].info, parameter[constant[reading line]]]
variable[line] assign[=] call[name[self].read, parameter[name[self].line_length]]
if compare[call[name[len], parameter[name[line]]] less[<] name[self].line_length] begin[:]
call[name[_LOGGER].info, parameter[constant[all lines read]]]
return[name[line]]
|
keyword[def] identifier[readline] ( identifier[self] ):
literal[string]
identifier[_LOGGER] . identifier[info] ( literal[string] )
identifier[line] = identifier[self] . identifier[read] ( identifier[self] . identifier[line_length] )
keyword[if] identifier[len] ( identifier[line] )< identifier[self] . identifier[line_length] :
identifier[_LOGGER] . identifier[info] ( literal[string] )
keyword[return] identifier[line]
|
def readline(self):
"""Read a chunk of the output"""
_LOGGER.info('reading line')
line = self.read(self.line_length)
if len(line) < self.line_length:
_LOGGER.info('all lines read') # depends on [control=['if'], data=[]]
return line
|
def fix_display(self):
"""If this is being run on a headless system the Matplotlib
backend must be changed to one that doesn't need a display.
"""
try:
tkinter.Tk()
except (tkinter.TclError, NameError): # If there is no display.
try:
import matplotlib as mpl
except ImportError:
pass
else:
print("Setting matplotlib backend to Agg")
mpl.use('Agg')
|
def function[fix_display, parameter[self]]:
constant[If this is being run on a headless system the Matplotlib
backend must be changed to one that doesn't need a display.
]
<ast.Try object at 0x7da1b0e65870>
|
keyword[def] identifier[fix_display] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[tkinter] . identifier[Tk] ()
keyword[except] ( identifier[tkinter] . identifier[TclError] , identifier[NameError] ):
keyword[try] :
keyword[import] identifier[matplotlib] keyword[as] identifier[mpl]
keyword[except] identifier[ImportError] :
keyword[pass]
keyword[else] :
identifier[print] ( literal[string] )
identifier[mpl] . identifier[use] ( literal[string] )
|
def fix_display(self):
"""If this is being run on a headless system the Matplotlib
backend must be changed to one that doesn't need a display.
"""
try:
tkinter.Tk() # depends on [control=['try'], data=[]]
except (tkinter.TclError, NameError): # If there is no display.
try:
import matplotlib as mpl # depends on [control=['try'], data=[]]
except ImportError:
pass # depends on [control=['except'], data=[]]
else:
print('Setting matplotlib backend to Agg')
mpl.use('Agg') # depends on [control=['except'], data=[]]
|
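The probe in fix_display() generalises: constructing a Tk root is a cheap display check, and Agg is Matplotlib's standard file-only backend. A standalone sketch of the same idea:

import tkinter

# Probe for a display by creating (and immediately destroying) a Tk root.
try:
    tkinter.Tk().destroy()
    headless = False
except tkinter.TclError:         # raised when no display is available
    headless = True
if headless:
    import matplotlib as mpl
    mpl.use('Agg')               # render to files instead of a window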
def _get_array_controller_resource(self):
"""Gets the ArrayController resource if exists.
:raises: IloCommandNotSupportedError if the resource ArrayController
doesn't exist.
:returns the tuple of SmartStorage URI, Headers and settings.
"""
headers, storage_uri, storage_settings = self._get_storage_resource()
if ('links' in storage_settings and
'ArrayControllers' in storage_settings['links']):
            # Get the ArrayControllers URI and Settings
array_uri = storage_settings['links']['ArrayControllers']['href']
status, headers, array_settings = self._rest_get(array_uri)
if status >= 300:
msg = self._get_extended_error(array_settings)
raise exception.IloError(msg)
return headers, array_uri, array_settings
else:
msg = ('"links/ArrayControllers" section in SmartStorage'
' does not exist')
raise exception.IloCommandNotSupportedError(msg)
|
def function[_get_array_controller_resource, parameter[self]]:
constant[Gets the ArrayController resource if it exists.
:raises: IloCommandNotSupportedError if the resource ArrayController
doesn't exist.
:returns: the tuple of headers, ArrayController URI and settings.
]
<ast.Tuple object at 0x7da1b1a8da80> assign[=] call[name[self]._get_storage_resource, parameter[]]
if <ast.BoolOp object at 0x7da1b1a8d6f0> begin[:]
variable[array_uri] assign[=] call[call[call[name[storage_settings]][constant[links]]][constant[ArrayControllers]]][constant[href]]
<ast.Tuple object at 0x7da1b1a8f0d0> assign[=] call[name[self]._rest_get, parameter[name[array_uri]]]
if compare[name[status] greater_or_equal[>=] constant[300]] begin[:]
variable[msg] assign[=] call[name[self]._get_extended_error, parameter[name[array_settings]]]
<ast.Raise object at 0x7da1b1a8dc00>
return[tuple[[<ast.Name object at 0x7da1b1a8db70>, <ast.Name object at 0x7da1b1a8e350>, <ast.Name object at 0x7da1b1a8ef50>]]]
|
keyword[def] identifier[_get_array_controller_resource] ( identifier[self] ):
literal[string]
identifier[headers] , identifier[storage_uri] , identifier[storage_settings] = identifier[self] . identifier[_get_storage_resource] ()
keyword[if] ( literal[string] keyword[in] identifier[storage_settings] keyword[and]
literal[string] keyword[in] identifier[storage_settings] [ literal[string] ]):
identifier[array_uri] = identifier[storage_settings] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[status] , identifier[headers] , identifier[array_settings] = identifier[self] . identifier[_rest_get] ( identifier[array_uri] )
keyword[if] identifier[status] >= literal[int] :
identifier[msg] = identifier[self] . identifier[_get_extended_error] ( identifier[array_settings] )
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
keyword[return] identifier[headers] , identifier[array_uri] , identifier[array_settings]
keyword[else] :
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[exception] . identifier[IloCommandNotSupportedError] ( identifier[msg] )
|
def _get_array_controller_resource(self):
"""Gets the ArrayController resource if exists.
:raises: IloCommandNotSupportedError if the resource ArrayController
doesn't exist.
:returns the tuple of SmartStorage URI, Headers and settings.
"""
(headers, storage_uri, storage_settings) = self._get_storage_resource()
if 'links' in storage_settings and 'ArrayControllers' in storage_settings['links']:
        # Get the ArrayControllers URI and Settings
array_uri = storage_settings['links']['ArrayControllers']['href']
(status, headers, array_settings) = self._rest_get(array_uri)
if status >= 300:
msg = self._get_extended_error(array_settings)
raise exception.IloError(msg) # depends on [control=['if'], data=[]]
return (headers, array_uri, array_settings) # depends on [control=['if'], data=[]]
else:
msg = '"links/ArrayControllers" section in SmartStorage does not exist'
raise exception.IloCommandNotSupportedError(msg)
|
def _parse_hits(self, hits, resource):
"""Parse hits response into documents."""
datasource = self.get_datasource(resource)
schema = {}
schema.update(config.DOMAIN[datasource[0]].get('schema', {}))
schema.update(config.DOMAIN[resource].get('schema', {}))
dates = get_dates(schema)
docs = []
for hit in hits.get('hits', {}).get('hits', []):
docs.append(format_doc(hit, schema, dates))
return ElasticCursor(hits, docs)
|
def function[_parse_hits, parameter[self, hits, resource]]:
constant[Parse hits response into documents.]
variable[datasource] assign[=] call[name[self].get_datasource, parameter[name[resource]]]
variable[schema] assign[=] dictionary[[], []]
call[name[schema].update, parameter[call[call[name[config].DOMAIN][call[name[datasource]][constant[0]]].get, parameter[constant[schema], dictionary[[], []]]]]]
call[name[schema].update, parameter[call[call[name[config].DOMAIN][name[resource]].get, parameter[constant[schema], dictionary[[], []]]]]]
variable[dates] assign[=] call[name[get_dates], parameter[name[schema]]]
variable[docs] assign[=] list[[]]
for taget[name[hit]] in starred[call[call[name[hits].get, parameter[constant[hits], dictionary[[], []]]].get, parameter[constant[hits], list[[]]]]] begin[:]
call[name[docs].append, parameter[call[name[format_doc], parameter[name[hit], name[schema], name[dates]]]]]
return[call[name[ElasticCursor], parameter[name[hits], name[docs]]]]
|
keyword[def] identifier[_parse_hits] ( identifier[self] , identifier[hits] , identifier[resource] ):
literal[string]
identifier[datasource] = identifier[self] . identifier[get_datasource] ( identifier[resource] )
identifier[schema] ={}
identifier[schema] . identifier[update] ( identifier[config] . identifier[DOMAIN] [ identifier[datasource] [ literal[int] ]]. identifier[get] ( literal[string] ,{}))
identifier[schema] . identifier[update] ( identifier[config] . identifier[DOMAIN] [ identifier[resource] ]. identifier[get] ( literal[string] ,{}))
identifier[dates] = identifier[get_dates] ( identifier[schema] )
identifier[docs] =[]
keyword[for] identifier[hit] keyword[in] identifier[hits] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[]):
identifier[docs] . identifier[append] ( identifier[format_doc] ( identifier[hit] , identifier[schema] , identifier[dates] ))
keyword[return] identifier[ElasticCursor] ( identifier[hits] , identifier[docs] )
|
def _parse_hits(self, hits, resource):
"""Parse hits response into documents."""
datasource = self.get_datasource(resource)
schema = {}
schema.update(config.DOMAIN[datasource[0]].get('schema', {}))
schema.update(config.DOMAIN[resource].get('schema', {}))
dates = get_dates(schema)
docs = []
for hit in hits.get('hits', {}).get('hits', []):
docs.append(format_doc(hit, schema, dates)) # depends on [control=['for'], data=['hit']]
return ElasticCursor(hits, docs)
|
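_parse_hits() assumes the standard Elasticsearch search-response envelope and walks the inner 'hits' list. The expected shape, sketched with illustrative values:

# Illustrative shape of the `hits` argument consumed by _parse_hits().
hits = {
    'hits': {
        'total': 1,
        'hits': [
            {'_id': '1',
             '_source': {'headline': 'test',
                         'versioncreated': '2017-01-01T00:00:00+0000'}},
        ],
    },
}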
def signature_type(self):
"""Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384'
"""
if not self.mardata.signatures:
return None
for sig in self.mardata.signatures.sigs:
if sig.algorithm_id == 1:
return 'sha1'
elif sig.algorithm_id == 2:
return 'sha384'
else:
return 'unknown'
|
def function[signature_type, parameter[self]]:
constant[Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384'
]
if <ast.UnaryOp object at 0x7da1b04c9720> begin[:]
return[constant[None]]
for taget[name[sig]] in starred[name[self].mardata.signatures.sigs] begin[:]
if compare[name[sig].algorithm_id equal[==] constant[1]] begin[:]
return[constant[sha1]]
|
keyword[def] identifier[signature_type] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[mardata] . identifier[signatures] :
keyword[return] keyword[None]
keyword[for] identifier[sig] keyword[in] identifier[self] . identifier[mardata] . identifier[signatures] . identifier[sigs] :
keyword[if] identifier[sig] . identifier[algorithm_id] == literal[int] :
keyword[return] literal[string]
keyword[elif] identifier[sig] . identifier[algorithm_id] == literal[int] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string]
|
def signature_type(self):
"""Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384'
"""
if not self.mardata.signatures:
return None # depends on [control=['if'], data=[]]
for sig in self.mardata.signatures.sigs:
if sig.algorithm_id == 1:
return 'sha1' # depends on [control=['if'], data=[]]
elif sig.algorithm_id == 2:
return 'sha384' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sig']]
else:
return 'unknown'
|
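Because every branch of the loop returns, signature_type() effectively inspects only the first signature. A table-driven sketch of the same algorithm-id mapping (an illustration, not the library's API):

ALGO_NAMES = {1: 'sha1', 2: 'sha384'}

def first_signature_type(sigs):
    for sig in sigs:
        return ALGO_NAMES.get(sig.algorithm_id, 'unknown')  # first sig decides
    return None  # no signatures present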
def client_start(request, socket, context):
"""
Adds the client triple to CLIENTS.
"""
CLIENTS[socket.session.session_id] = (request, socket, context)
|
def function[client_start, parameter[request, socket, context]]:
constant[
Adds the client triple to CLIENTS.
]
call[name[CLIENTS]][name[socket].session.session_id] assign[=] tuple[[<ast.Name object at 0x7da18c4cf700>, <ast.Name object at 0x7da18c4cfd60>, <ast.Name object at 0x7da18c4cd3f0>]]
|
keyword[def] identifier[client_start] ( identifier[request] , identifier[socket] , identifier[context] ):
literal[string]
identifier[CLIENTS] [ identifier[socket] . identifier[session] . identifier[session_id] ]=( identifier[request] , identifier[socket] , identifier[context] )
|
def client_start(request, socket, context):
"""
Adds the client triple to CLIENTS.
"""
CLIENTS[socket.session.session_id] = (request, socket, context)
|
def on_btn_delete_fit(self, event):
"""
removes the current interpretation
Parameters
----------
event : the wx.ButtonEvent that triggered this function
"""
self.delete_fit(self.current_fit, specimen=self.s)
|
def function[on_btn_delete_fit, parameter[self, event]]:
constant[
removes the current interpretation
Parameters
----------
event : the wx.ButtonEvent that triggered this function
]
call[name[self].delete_fit, parameter[name[self].current_fit]]
|
keyword[def] identifier[on_btn_delete_fit] ( identifier[self] , identifier[event] ):
literal[string]
identifier[self] . identifier[delete_fit] ( identifier[self] . identifier[current_fit] , identifier[specimen] = identifier[self] . identifier[s] )
|
def on_btn_delete_fit(self, event):
"""
removes the current interpretation
Parameters
----------
event : the wx.ButtonEvent that triggered this function
"""
self.delete_fit(self.current_fit, specimen=self.s)
|
def _export(dataset_input, dataset_output, random_index_column, path, column_names=None, byteorder="=", shuffle=False, selection=False, progress=None, virtual=True, sort=None, ascending=True):
"""
:param DatasetLocal dataset: dataset to export
:param str path: path for file
    :param list[str] column_names: list of column names to export or None for all columns
:param str byteorder: = for native, < for little endian and > for big endian
:param bool shuffle: export rows in random order
:param bool selection: export selection or not
:param progress: progress callback that gets a progress fraction as argument and should return True to continue
:return:
"""
if selection:
if selection == True: # easier to work with the name
selection = "default"
N = len(dataset_input) if not selection else dataset_input.selected_length(selection)
if N == 0:
raise ValueError("Cannot export empty table")
if shuffle and sort:
raise ValueError("Cannot shuffle and sort at the same time")
if shuffle:
shuffle_array = dataset_output.columns[random_index_column]
partial_shuffle = shuffle and len(dataset_input) != N
order_array = None
order_array_inverse = None
# for strings we also need the inverse order_array, keep track of that
has_strings = any([dataset_input.dtype(k) == str_type for k in column_names])
if partial_shuffle:
# if we only export a portion, we need to create the full length random_index array, and
shuffle_array_full = np.random.choice(len(dataset_input), len(dataset_input), replace=False)
# then take a section of it
shuffle_array[:] = shuffle_array_full[shuffle_array_full < N]
del shuffle_array_full
order_array = shuffle_array
elif shuffle:
# better to do this in memory
shuffle_array_memory = np.random.choice(N, N, replace=False)
shuffle_array[:] = shuffle_array_memory
order_array = shuffle_array
if order_array is not None:
indices_r = np.zeros_like(order_array)
indices_r[order_array] = np.arange(len(order_array))
order_array_inverse = indices_r
del indices_r
if sort:
if selection:
raise ValueError("sorting selections not yet supported")
# these indices sort the input array, but we evaluate the input in sequential order and write it out in sorted order
# e.g., not b[:] = a[indices]
# but b[indices_r] = a
logger.info("sorting...")
indices = np.argsort(dataset_input.evaluate(sort))
indices_r = np.zeros_like(indices)
indices_r[indices] = np.arange(len(indices))
if has_strings:
# in this case we already have the inverse ready
            order_array_inverse = indices if ascending else indices[::-1]
else:
del indices
order_array = indices_r if ascending else indices_r[::-1]
logger.info("sorting done")
if progress == True:
progress = vaex.utils.progressbar_callable(title="exporting")
progress = progress or (lambda value: True)
progress_total = len(column_names) * len(dataset_input)
progress_status = ProgressStatus()
progress_status.cancelled = False
progress_status.value = 0
if selection:
full_mask = dataset_input.evaluate_selection_mask(selection)
else:
full_mask = None
sparse_groups = collections.defaultdict(list)
sparse_matrices = {} # alternative to a set of matrices, since they are not hashable
string_columns = []
futures = []
thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
if True:
for column_name in column_names:
sparse_matrix = dataset_output._sparse_matrix(column_name)
if sparse_matrix is not None:
# sparse columns are written differently
sparse_groups[id(sparse_matrix)].append(column_name)
sparse_matrices[id(sparse_matrix)] = sparse_matrix
continue
logger.debug(" exporting column: %s " % column_name)
future = thread_pool.submit(_export_column, dataset_input, dataset_output, column_name, full_mask,
shuffle, sort, selection, N, order_array, order_array_inverse, progress_status)
futures.append(future)
done = False
while not done:
done = True
for future in futures:
try:
future.result(0.1/4)
except concurrent.futures.TimeoutError:
done = False
break
if not done:
if not progress(progress_status.value / float(progress_total)):
progress_status.cancelled = True
for sparse_matrix_id, column_names in sparse_groups.items():
sparse_matrix = sparse_matrices[sparse_matrix_id]
for column_name in column_names:
assert not shuffle
assert selection in [None, False]
column = dataset_output.columns[column_name]
column.matrix.data[:] = dataset_input.columns[column_name].matrix.data
column.matrix.indptr[:] = dataset_input.columns[column_name].matrix.indptr
column.matrix.indices[:] = dataset_input.columns[column_name].matrix.indices
return column_names
|
def function[_export, parameter[dataset_input, dataset_output, random_index_column, path, column_names, byteorder, shuffle, selection, progress, virtual, sort, ascending]]:
constant[
:param DatasetLocal dataset: dataset to export
:param str path: path for file
:param list[str] column_names: list of column names to export or None for all columns
:param str byteorder: = for native, < for little endian and > for big endian
:param bool shuffle: export rows in random order
:param bool selection: export selection or not
:param progress: progress callback that gets a progress fraction as argument and should return True to continue
:return:
]
if name[selection] begin[:]
if compare[name[selection] equal[==] constant[True]] begin[:]
variable[selection] assign[=] constant[default]
variable[N] assign[=] <ast.IfExp object at 0x7da18dc050f0>
if compare[name[N] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18dc05630>
if <ast.BoolOp object at 0x7da18dc07220> begin[:]
<ast.Raise object at 0x7da18dc04580>
if name[shuffle] begin[:]
variable[shuffle_array] assign[=] call[name[dataset_output].columns][name[random_index_column]]
variable[partial_shuffle] assign[=] <ast.BoolOp object at 0x7da18dc05870>
variable[order_array] assign[=] constant[None]
variable[order_array_inverse] assign[=] constant[None]
variable[has_strings] assign[=] call[name[any], parameter[<ast.ListComp object at 0x7da18dc04d00>]]
if name[partial_shuffle] begin[:]
variable[shuffle_array_full] assign[=] call[name[np].random.choice, parameter[call[name[len], parameter[name[dataset_input]]], call[name[len], parameter[name[dataset_input]]]]]
call[name[shuffle_array]][<ast.Slice object at 0x7da1b2347bb0>] assign[=] call[name[shuffle_array_full]][compare[name[shuffle_array_full] less[<] name[N]]]
<ast.Delete object at 0x7da1b23463e0>
variable[order_array] assign[=] name[shuffle_array]
if compare[name[order_array] is_not constant[None]] begin[:]
variable[indices_r] assign[=] call[name[np].zeros_like, parameter[name[order_array]]]
call[name[indices_r]][name[order_array]] assign[=] call[name[np].arange, parameter[call[name[len], parameter[name[order_array]]]]]
variable[order_array_inverse] assign[=] name[indices_r]
<ast.Delete object at 0x7da1b2345360>
if name[sort] begin[:]
if name[selection] begin[:]
<ast.Raise object at 0x7da1b2346bf0>
call[name[logger].info, parameter[constant[sorting...]]]
variable[indices] assign[=] call[name[np].argsort, parameter[call[name[dataset_input].evaluate, parameter[name[sort]]]]]
variable[indices_r] assign[=] call[name[np].zeros_like, parameter[name[indices]]]
call[name[indices_r]][name[indices]] assign[=] call[name[np].arange, parameter[call[name[len], parameter[name[indices]]]]]
if name[has_strings] begin[:]
variable[order_array_inverse] assign[=] <ast.IfExp object at 0x7da1b2346a40>
variable[order_array] assign[=] <ast.IfExp object at 0x7da1b2346ef0>
call[name[logger].info, parameter[constant[sorting done]]]
if compare[name[progress] equal[==] constant[True]] begin[:]
variable[progress] assign[=] call[name[vaex].utils.progressbar_callable, parameter[]]
variable[progress] assign[=] <ast.BoolOp object at 0x7da1b2344dc0>
variable[progress_total] assign[=] binary_operation[call[name[len], parameter[name[column_names]]] * call[name[len], parameter[name[dataset_input]]]]
variable[progress_status] assign[=] call[name[ProgressStatus], parameter[]]
name[progress_status].cancelled assign[=] constant[False]
name[progress_status].value assign[=] constant[0]
if name[selection] begin[:]
variable[full_mask] assign[=] call[name[dataset_input].evaluate_selection_mask, parameter[name[selection]]]
variable[sparse_groups] assign[=] call[name[collections].defaultdict, parameter[name[list]]]
variable[sparse_matrices] assign[=] dictionary[[], []]
variable[string_columns] assign[=] list[[]]
variable[futures] assign[=] list[[]]
variable[thread_pool] assign[=] call[name[concurrent].futures.ThreadPoolExecutor, parameter[]]
if constant[True] begin[:]
for taget[name[column_name]] in starred[name[column_names]] begin[:]
variable[sparse_matrix] assign[=] call[name[dataset_output]._sparse_matrix, parameter[name[column_name]]]
if compare[name[sparse_matrix] is_not constant[None]] begin[:]
call[call[name[sparse_groups]][call[name[id], parameter[name[sparse_matrix]]]].append, parameter[name[column_name]]]
call[name[sparse_matrices]][call[name[id], parameter[name[sparse_matrix]]]] assign[=] name[sparse_matrix]
continue
call[name[logger].debug, parameter[binary_operation[constant[ exporting column: %s ] <ast.Mod object at 0x7da2590d6920> name[column_name]]]]
variable[future] assign[=] call[name[thread_pool].submit, parameter[name[_export_column], name[dataset_input], name[dataset_output], name[column_name], name[full_mask], name[shuffle], name[sort], name[selection], name[N], name[order_array], name[order_array_inverse], name[progress_status]]]
call[name[futures].append, parameter[name[future]]]
variable[done] assign[=] constant[False]
while <ast.UnaryOp object at 0x7da18f720af0> begin[:]
variable[done] assign[=] constant[True]
for taget[name[future]] in starred[name[futures]] begin[:]
<ast.Try object at 0x7da18f723130>
if <ast.UnaryOp object at 0x7da18f720cd0> begin[:]
if <ast.UnaryOp object at 0x7da18f723fd0> begin[:]
name[progress_status].cancelled assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da18f721b70>, <ast.Name object at 0x7da18f722bf0>]]] in starred[call[name[sparse_groups].items, parameter[]]] begin[:]
variable[sparse_matrix] assign[=] call[name[sparse_matrices]][name[sparse_matrix_id]]
for taget[name[column_name]] in starred[name[column_names]] begin[:]
assert[<ast.UnaryOp object at 0x7da18f7225c0>]
assert[compare[name[selection] in list[[<ast.Constant object at 0x7da18f720310>, <ast.Constant object at 0x7da18f723370>]]]]
variable[column] assign[=] call[name[dataset_output].columns][name[column_name]]
call[name[column].matrix.data][<ast.Slice object at 0x7da18f721960>] assign[=] call[name[dataset_input].columns][name[column_name]].matrix.data
call[name[column].matrix.indptr][<ast.Slice object at 0x7da18f7214e0>] assign[=] call[name[dataset_input].columns][name[column_name]].matrix.indptr
call[name[column].matrix.indices][<ast.Slice object at 0x7da18f7222c0>] assign[=] call[name[dataset_input].columns][name[column_name]].matrix.indices
return[name[column_names]]
|
keyword[def] identifier[_export] ( identifier[dataset_input] , identifier[dataset_output] , identifier[random_index_column] , identifier[path] , identifier[column_names] = keyword[None] , identifier[byteorder] = literal[string] , identifier[shuffle] = keyword[False] , identifier[selection] = keyword[False] , identifier[progress] = keyword[None] , identifier[virtual] = keyword[True] , identifier[sort] = keyword[None] , identifier[ascending] = keyword[True] ):
literal[string]
keyword[if] identifier[selection] :
keyword[if] identifier[selection] == keyword[True] :
identifier[selection] = literal[string]
identifier[N] = identifier[len] ( identifier[dataset_input] ) keyword[if] keyword[not] identifier[selection] keyword[else] identifier[dataset_input] . identifier[selected_length] ( identifier[selection] )
keyword[if] identifier[N] == literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[shuffle] keyword[and] identifier[sort] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[shuffle] :
identifier[shuffle_array] = identifier[dataset_output] . identifier[columns] [ identifier[random_index_column] ]
identifier[partial_shuffle] = identifier[shuffle] keyword[and] identifier[len] ( identifier[dataset_input] )!= identifier[N]
identifier[order_array] = keyword[None]
identifier[order_array_inverse] = keyword[None]
identifier[has_strings] = identifier[any] ([ identifier[dataset_input] . identifier[dtype] ( identifier[k] )== identifier[str_type] keyword[for] identifier[k] keyword[in] identifier[column_names] ])
keyword[if] identifier[partial_shuffle] :
identifier[shuffle_array_full] = identifier[np] . identifier[random] . identifier[choice] ( identifier[len] ( identifier[dataset_input] ), identifier[len] ( identifier[dataset_input] ), identifier[replace] = keyword[False] )
identifier[shuffle_array] [:]= identifier[shuffle_array_full] [ identifier[shuffle_array_full] < identifier[N] ]
keyword[del] identifier[shuffle_array_full]
identifier[order_array] = identifier[shuffle_array]
keyword[elif] identifier[shuffle] :
identifier[shuffle_array_memory] = identifier[np] . identifier[random] . identifier[choice] ( identifier[N] , identifier[N] , identifier[replace] = keyword[False] )
identifier[shuffle_array] [:]= identifier[shuffle_array_memory]
identifier[order_array] = identifier[shuffle_array]
keyword[if] identifier[order_array] keyword[is] keyword[not] keyword[None] :
identifier[indices_r] = identifier[np] . identifier[zeros_like] ( identifier[order_array] )
identifier[indices_r] [ identifier[order_array] ]= identifier[np] . identifier[arange] ( identifier[len] ( identifier[order_array] ))
identifier[order_array_inverse] = identifier[indices_r]
keyword[del] identifier[indices_r]
keyword[if] identifier[sort] :
keyword[if] identifier[selection] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[indices] = identifier[np] . identifier[argsort] ( identifier[dataset_input] . identifier[evaluate] ( identifier[sort] ))
identifier[indices_r] = identifier[np] . identifier[zeros_like] ( identifier[indices] )
identifier[indices_r] [ identifier[indices] ]= identifier[np] . identifier[arange] ( identifier[len] ( identifier[indices] ))
keyword[if] identifier[has_strings] :
identifier[order_array_inverse] = identifier[indices] keyword[if] identifier[ascending] keyword[else] identifier[indices] [::- literal[int] ]
keyword[else] :
keyword[del] identifier[indices]
identifier[order_array] = identifier[indices_r] keyword[if] identifier[ascending] keyword[else] identifier[indices_r] [::- literal[int] ]
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[progress] == keyword[True] :
identifier[progress] = identifier[vaex] . identifier[utils] . identifier[progressbar_callable] ( identifier[title] = literal[string] )
identifier[progress] = identifier[progress] keyword[or] ( keyword[lambda] identifier[value] : keyword[True] )
identifier[progress_total] = identifier[len] ( identifier[column_names] )* identifier[len] ( identifier[dataset_input] )
identifier[progress_status] = identifier[ProgressStatus] ()
identifier[progress_status] . identifier[cancelled] = keyword[False]
identifier[progress_status] . identifier[value] = literal[int]
keyword[if] identifier[selection] :
identifier[full_mask] = identifier[dataset_input] . identifier[evaluate_selection_mask] ( identifier[selection] )
keyword[else] :
identifier[full_mask] = keyword[None]
identifier[sparse_groups] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
identifier[sparse_matrices] ={}
identifier[string_columns] =[]
identifier[futures] =[]
identifier[thread_pool] = identifier[concurrent] . identifier[futures] . identifier[ThreadPoolExecutor] ( identifier[max_workers] = literal[int] )
keyword[if] keyword[True] :
keyword[for] identifier[column_name] keyword[in] identifier[column_names] :
identifier[sparse_matrix] = identifier[dataset_output] . identifier[_sparse_matrix] ( identifier[column_name] )
keyword[if] identifier[sparse_matrix] keyword[is] keyword[not] keyword[None] :
identifier[sparse_groups] [ identifier[id] ( identifier[sparse_matrix] )]. identifier[append] ( identifier[column_name] )
identifier[sparse_matrices] [ identifier[id] ( identifier[sparse_matrix] )]= identifier[sparse_matrix]
keyword[continue]
identifier[logger] . identifier[debug] ( literal[string] % identifier[column_name] )
identifier[future] = identifier[thread_pool] . identifier[submit] ( identifier[_export_column] , identifier[dataset_input] , identifier[dataset_output] , identifier[column_name] , identifier[full_mask] ,
identifier[shuffle] , identifier[sort] , identifier[selection] , identifier[N] , identifier[order_array] , identifier[order_array_inverse] , identifier[progress_status] )
identifier[futures] . identifier[append] ( identifier[future] )
identifier[done] = keyword[False]
keyword[while] keyword[not] identifier[done] :
identifier[done] = keyword[True]
keyword[for] identifier[future] keyword[in] identifier[futures] :
keyword[try] :
identifier[future] . identifier[result] ( literal[int] / literal[int] )
keyword[except] identifier[concurrent] . identifier[futures] . identifier[TimeoutError] :
identifier[done] = keyword[False]
keyword[break]
keyword[if] keyword[not] identifier[done] :
keyword[if] keyword[not] identifier[progress] ( identifier[progress_status] . identifier[value] / identifier[float] ( identifier[progress_total] )):
identifier[progress_status] . identifier[cancelled] = keyword[True]
keyword[for] identifier[sparse_matrix_id] , identifier[column_names] keyword[in] identifier[sparse_groups] . identifier[items] ():
identifier[sparse_matrix] = identifier[sparse_matrices] [ identifier[sparse_matrix_id] ]
keyword[for] identifier[column_name] keyword[in] identifier[column_names] :
keyword[assert] keyword[not] identifier[shuffle]
keyword[assert] identifier[selection] keyword[in] [ keyword[None] , keyword[False] ]
identifier[column] = identifier[dataset_output] . identifier[columns] [ identifier[column_name] ]
identifier[column] . identifier[matrix] . identifier[data] [:]= identifier[dataset_input] . identifier[columns] [ identifier[column_name] ]. identifier[matrix] . identifier[data]
identifier[column] . identifier[matrix] . identifier[indptr] [:]= identifier[dataset_input] . identifier[columns] [ identifier[column_name] ]. identifier[matrix] . identifier[indptr]
identifier[column] . identifier[matrix] . identifier[indices] [:]= identifier[dataset_input] . identifier[columns] [ identifier[column_name] ]. identifier[matrix] . identifier[indices]
keyword[return] identifier[column_names]
|
def _export(dataset_input, dataset_output, random_index_column, path, column_names=None, byteorder='=', shuffle=False, selection=False, progress=None, virtual=True, sort=None, ascending=True):
"""
:param DatasetLocal dataset: dataset to export
:param str path: path for file
    :param list[str] column_names: list of column names to export or None for all columns
:param str byteorder: = for native, < for little endian and > for big endian
:param bool shuffle: export rows in random order
:param bool selection: export selection or not
:param progress: progress callback that gets a progress fraction as argument and should return True to continue
:return:
"""
if selection:
if selection == True: # easier to work with the name
selection = 'default' # depends on [control=['if'], data=['selection']] # depends on [control=['if'], data=[]]
N = len(dataset_input) if not selection else dataset_input.selected_length(selection)
if N == 0:
raise ValueError('Cannot export empty table') # depends on [control=['if'], data=[]]
if shuffle and sort:
raise ValueError('Cannot shuffle and sort at the same time') # depends on [control=['if'], data=[]]
if shuffle:
shuffle_array = dataset_output.columns[random_index_column] # depends on [control=['if'], data=[]]
partial_shuffle = shuffle and len(dataset_input) != N
order_array = None
order_array_inverse = None
# for strings we also need the inverse order_array, keep track of that
has_strings = any([dataset_input.dtype(k) == str_type for k in column_names])
if partial_shuffle:
# if we only export a portion, we need to create the full length random_index array, and
shuffle_array_full = np.random.choice(len(dataset_input), len(dataset_input), replace=False)
# then take a section of it
shuffle_array[:] = shuffle_array_full[shuffle_array_full < N]
del shuffle_array_full
order_array = shuffle_array # depends on [control=['if'], data=[]]
elif shuffle:
# better to do this in memory
shuffle_array_memory = np.random.choice(N, N, replace=False)
shuffle_array[:] = shuffle_array_memory
order_array = shuffle_array # depends on [control=['if'], data=[]]
if order_array is not None:
indices_r = np.zeros_like(order_array)
indices_r[order_array] = np.arange(len(order_array))
order_array_inverse = indices_r
del indices_r # depends on [control=['if'], data=['order_array']]
if sort:
if selection:
raise ValueError('sorting selections not yet supported') # depends on [control=['if'], data=[]]
# these indices sort the input array, but we evaluate the input in sequential order and write it out in sorted order
# e.g., not b[:] = a[indices]
# but b[indices_r] = a
logger.info('sorting...')
indices = np.argsort(dataset_input.evaluate(sort))
indices_r = np.zeros_like(indices)
indices_r[indices] = np.arange(len(indices))
if has_strings:
# in this case we already have the inverse ready
            order_array_inverse = indices if ascending else indices[::-1] # depends on [control=['if'], data=[]]
else:
del indices
order_array = indices_r if ascending else indices_r[::-1]
logger.info('sorting done') # depends on [control=['if'], data=[]]
if progress == True:
progress = vaex.utils.progressbar_callable(title='exporting') # depends on [control=['if'], data=['progress']]
progress = progress or (lambda value: True)
progress_total = len(column_names) * len(dataset_input)
progress_status = ProgressStatus()
progress_status.cancelled = False
progress_status.value = 0
if selection:
full_mask = dataset_input.evaluate_selection_mask(selection) # depends on [control=['if'], data=[]]
else:
full_mask = None
sparse_groups = collections.defaultdict(list)
sparse_matrices = {} # alternative to a set of matrices, since they are not hashable
string_columns = []
futures = []
thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
if True:
for column_name in column_names:
sparse_matrix = dataset_output._sparse_matrix(column_name)
if sparse_matrix is not None:
# sparse columns are written differently
sparse_groups[id(sparse_matrix)].append(column_name)
sparse_matrices[id(sparse_matrix)] = sparse_matrix
continue # depends on [control=['if'], data=['sparse_matrix']]
logger.debug(' exporting column: %s ' % column_name)
future = thread_pool.submit(_export_column, dataset_input, dataset_output, column_name, full_mask, shuffle, sort, selection, N, order_array, order_array_inverse, progress_status)
futures.append(future) # depends on [control=['for'], data=['column_name']] # depends on [control=['if'], data=[]]
done = False
while not done:
done = True
for future in futures:
try:
future.result(0.1 / 4) # depends on [control=['try'], data=[]]
except concurrent.futures.TimeoutError:
done = False
break # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['future']]
if not done:
if not progress(progress_status.value / float(progress_total)):
progress_status.cancelled = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
for (sparse_matrix_id, column_names) in sparse_groups.items():
sparse_matrix = sparse_matrices[sparse_matrix_id]
for column_name in column_names:
assert not shuffle
assert selection in [None, False]
column = dataset_output.columns[column_name]
column.matrix.data[:] = dataset_input.columns[column_name].matrix.data
column.matrix.indptr[:] = dataset_input.columns[column_name].matrix.indptr
column.matrix.indices[:] = dataset_input.columns[column_name].matrix.indices # depends on [control=['for'], data=['column_name']] # depends on [control=['for'], data=[]]
return column_names
|
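The index gymnastics in _export() hinge on the inverse permutation: evaluating the input sequentially and scatter-writing with b[indices_r] = a is equivalent to gather-reading with b[:] = a[indices]. A tiny NumPy demonstration:

import numpy as np

a = np.array([30, 10, 20])
indices = np.argsort(a)                        # [1, 2, 0]: gather order sorting a
indices_r = np.zeros_like(indices)
indices_r[indices] = np.arange(len(indices))   # inverse permutation: [2, 0, 1]
b = np.empty_like(a)
b[indices_r] = a                               # scatter-write in input order
assert (b == a[indices]).all()                 # both yield [10, 20, 30]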
def reverse(self):
"""
Returns a reversed copy of the list.
"""
colors = ColorList.copy(self)
_list.reverse(colors)
return colors
|
def function[reverse, parameter[self]]:
constant[
Returns a reversed copy of the list.
]
variable[colors] assign[=] call[name[ColorList].copy, parameter[name[self]]]
call[name[_list].reverse, parameter[name[colors]]]
return[name[colors]]
|
keyword[def] identifier[reverse] ( identifier[self] ):
literal[string]
identifier[colors] = identifier[ColorList] . identifier[copy] ( identifier[self] )
identifier[_list] . identifier[reverse] ( identifier[colors] )
keyword[return] identifier[colors]
|
def reverse(self):
"""
Returns a reversed copy of the list.
"""
colors = ColorList.copy(self)
_list.reverse(colors)
return colors
|
def _calculate_file_hashes(full_path, f_hashers):
"""
Returns a dictionary of (algorithm, hexdigest) values for the provided
filename
"""
if not os.path.exists(full_path):
raise BagValidationError("%s does not exist" % full_path)
try:
with open(full_path, 'rb') as f:
while True:
block = f.read(1048576)
if not block:
break
for i in list(f_hashers.values()):
i.update(block)
except IOError as e:
raise BagValidationError("could not read %s: %s" % (full_path, str(e)))
except OSError as e:
raise BagValidationError("could not read %s: %s" % (full_path, str(e)))
return dict(
(alg, h.hexdigest()) for alg, h in list(f_hashers.items())
)
|
def function[_calculate_file_hashes, parameter[full_path, f_hashers]]:
constant[
Returns a dictionary of (algorithm, hexdigest) values for the provided
filename
]
if <ast.UnaryOp object at 0x7da18f7225f0> begin[:]
<ast.Raise object at 0x7da18f7215a0>
<ast.Try object at 0x7da18f721c00>
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18f723f10>]]]
|
keyword[def] identifier[_calculate_file_hashes] ( identifier[full_path] , identifier[f_hashers] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[full_path] ):
keyword[raise] identifier[BagValidationError] ( literal[string] % identifier[full_path] )
keyword[try] :
keyword[with] identifier[open] ( identifier[full_path] , literal[string] ) keyword[as] identifier[f] :
keyword[while] keyword[True] :
identifier[block] = identifier[f] . identifier[read] ( literal[int] )
keyword[if] keyword[not] identifier[block] :
keyword[break]
keyword[for] identifier[i] keyword[in] identifier[list] ( identifier[f_hashers] . identifier[values] ()):
identifier[i] . identifier[update] ( identifier[block] )
keyword[except] identifier[IOError] keyword[as] identifier[e] :
keyword[raise] identifier[BagValidationError] ( literal[string] %( identifier[full_path] , identifier[str] ( identifier[e] )))
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[raise] identifier[BagValidationError] ( literal[string] %( identifier[full_path] , identifier[str] ( identifier[e] )))
keyword[return] identifier[dict] (
( identifier[alg] , identifier[h] . identifier[hexdigest] ()) keyword[for] identifier[alg] , identifier[h] keyword[in] identifier[list] ( identifier[f_hashers] . identifier[items] ())
)
|
def _calculate_file_hashes(full_path, f_hashers):
"""
Returns a dictionary of (algorithm, hexdigest) values for the provided
filename
"""
if not os.path.exists(full_path):
raise BagValidationError('%s does not exist' % full_path) # depends on [control=['if'], data=[]]
try:
with open(full_path, 'rb') as f:
while True:
block = f.read(1048576)
if not block:
break # depends on [control=['if'], data=[]]
for i in list(f_hashers.values()):
i.update(block) # depends on [control=['for'], data=['i']] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except IOError as e:
raise BagValidationError('could not read %s: %s' % (full_path, str(e))) # depends on [control=['except'], data=['e']]
except OSError as e:
raise BagValidationError('could not read %s: %s' % (full_path, str(e))) # depends on [control=['except'], data=['e']]
return dict(((alg, h.hexdigest()) for (alg, h) in list(f_hashers.items())))
|
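
A self-contained sketch of the same block-wise, multi-algorithm hashing pattern, using only hashlib (the file path is hypothetical):

import hashlib

f_hashers = {'md5': hashlib.md5(), 'sha256': hashlib.sha256()}
with open('data/sample.bin', 'rb') as f:   # hypothetical path
    while True:
        block = f.read(1048576)            # 1 MiB chunks keep memory bounded
        if not block:
            break
        for h in f_hashers.values():       # every digest sees every block once
            h.update(block)
print({alg: h.hexdigest() for alg, h in f_hashers.items()})
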
def count_below_mean(x):
"""
Returns the number of values in x that are lower than the mean of x
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:return: the value of this feature
:return type: float
"""
m = np.mean(x)
return np.where(x < m)[0].size
|
def function[count_below_mean, parameter[x]]:
constant[
Returns the number of values in x that are lower than the mean of x
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:return: the value of this feature
:return type: float
]
variable[m] assign[=] call[name[np].mean, parameter[name[x]]]
return[call[call[name[np].where, parameter[compare[name[x] less[<] name[m]]]]][constant[0]].size]
|
keyword[def] identifier[count_below_mean] ( identifier[x] ):
literal[string]
identifier[m] = identifier[np] . identifier[mean] ( identifier[x] )
keyword[return] identifier[np] . identifier[where] ( identifier[x] < identifier[m] )[ literal[int] ]. identifier[size]
|
def count_below_mean(x):
"""
Returns the number of values in x that are lower than the mean of x
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:return: the value of this feature
:return type: float
"""
m = np.mean(x)
return np.where(x < m)[0].size
|
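
A quick usage check, assuming count_below_mean is in scope as defined above:

import numpy as np

x = np.array([1, 2, 3, 10])     # mean is 4.0
print(count_below_mean(x))      # 3, since 1, 2 and 3 lie below the mean
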
def surface_subdivide2(surface, sub_surface_b):
"""Image for :meth`.Surface.subdivide` docstring."""
if NO_IMAGES:
return
# Plot set-up.
figure = plt.figure()
ax = figure.gca()
colors = seaborn.husl_palette(6)
N = 128
s_vals = np.linspace(0.0, 1.0, N + 1)
# Add edges from surface.
add_edges(ax, surface, s_vals, colors[4])
# Now do the same for surface B.
add_edges(ax, sub_surface_b, s_vals, colors[0])
# Add the control points polygon for the original surface.
nodes = surface._nodes[:, (0, 2, 4, 5, 0)]
add_patch(ax, nodes, colors[2], with_nodes=False)
# Add the control points polygon for the sub-surface.
nodes = sub_surface_b._nodes[:, (0, 1, 2, 5, 3, 0)]
add_patch(ax, nodes, colors[1], with_nodes=False)
# Plot **all** the nodes.
sub_nodes = sub_surface_b._nodes
ax.plot(
sub_nodes[0, :],
sub_nodes[1, :],
color="black",
linestyle="None",
marker="o",
)
# Take those same points and add the boundary.
ax.plot(nodes[0, :], nodes[1, :], color="black", linestyle="dashed")
ax.axis("scaled")
ax.set_xlim(-1.125, 2.125)
ax.set_ylim(-0.125, 4.125)
save_image(ax.figure, "surface_subdivide2")
|
def function[surface_subdivide2, parameter[surface, sub_surface_b]]:
    constant[Image for :meth:`.Surface.subdivide` docstring.]
if name[NO_IMAGES] begin[:]
return[None]
variable[figure] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[figure].gca, parameter[]]
variable[colors] assign[=] call[name[seaborn].husl_palette, parameter[constant[6]]]
variable[N] assign[=] constant[128]
variable[s_vals] assign[=] call[name[np].linspace, parameter[constant[0.0], constant[1.0], binary_operation[name[N] + constant[1]]]]
call[name[add_edges], parameter[name[ax], name[surface], name[s_vals], call[name[colors]][constant[4]]]]
call[name[add_edges], parameter[name[ax], name[sub_surface_b], name[s_vals], call[name[colors]][constant[0]]]]
variable[nodes] assign[=] call[name[surface]._nodes][tuple[[<ast.Slice object at 0x7da207f9ae90>, <ast.Tuple object at 0x7da207f9ac80>]]]
call[name[add_patch], parameter[name[ax], name[nodes], call[name[colors]][constant[2]]]]
variable[nodes] assign[=] call[name[sub_surface_b]._nodes][tuple[[<ast.Slice object at 0x7da207f9a9b0>, <ast.Tuple object at 0x7da207f9aec0>]]]
call[name[add_patch], parameter[name[ax], name[nodes], call[name[colors]][constant[1]]]]
variable[sub_nodes] assign[=] name[sub_surface_b]._nodes
call[name[ax].plot, parameter[call[name[sub_nodes]][tuple[[<ast.Constant object at 0x7da207f9a0b0>, <ast.Slice object at 0x7da207f98af0>]]], call[name[sub_nodes]][tuple[[<ast.Constant object at 0x7da207f9b340>, <ast.Slice object at 0x7da207f985b0>]]]]]
call[name[ax].plot, parameter[call[name[nodes]][tuple[[<ast.Constant object at 0x7da207f9b160>, <ast.Slice object at 0x7da207f9b730>]]], call[name[nodes]][tuple[[<ast.Constant object at 0x7da207f9a6b0>, <ast.Slice object at 0x7da207f983a0>]]]]]
call[name[ax].axis, parameter[constant[scaled]]]
call[name[ax].set_xlim, parameter[<ast.UnaryOp object at 0x7da207f99ba0>, constant[2.125]]]
call[name[ax].set_ylim, parameter[<ast.UnaryOp object at 0x7da207f989d0>, constant[4.125]]]
call[name[save_image], parameter[name[ax].figure, constant[surface_subdivide2]]]
|
keyword[def] identifier[surface_subdivide2] ( identifier[surface] , identifier[sub_surface_b] ):
literal[string]
keyword[if] identifier[NO_IMAGES] :
keyword[return]
identifier[figure] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[figure] . identifier[gca] ()
identifier[colors] = identifier[seaborn] . identifier[husl_palette] ( literal[int] )
identifier[N] = literal[int]
identifier[s_vals] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[N] + literal[int] )
identifier[add_edges] ( identifier[ax] , identifier[surface] , identifier[s_vals] , identifier[colors] [ literal[int] ])
identifier[add_edges] ( identifier[ax] , identifier[sub_surface_b] , identifier[s_vals] , identifier[colors] [ literal[int] ])
identifier[nodes] = identifier[surface] . identifier[_nodes] [:,( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )]
identifier[add_patch] ( identifier[ax] , identifier[nodes] , identifier[colors] [ literal[int] ], identifier[with_nodes] = keyword[False] )
identifier[nodes] = identifier[sub_surface_b] . identifier[_nodes] [:,( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )]
identifier[add_patch] ( identifier[ax] , identifier[nodes] , identifier[colors] [ literal[int] ], identifier[with_nodes] = keyword[False] )
identifier[sub_nodes] = identifier[sub_surface_b] . identifier[_nodes]
identifier[ax] . identifier[plot] (
identifier[sub_nodes] [ literal[int] ,:],
identifier[sub_nodes] [ literal[int] ,:],
identifier[color] = literal[string] ,
identifier[linestyle] = literal[string] ,
identifier[marker] = literal[string] ,
)
identifier[ax] . identifier[plot] ( identifier[nodes] [ literal[int] ,:], identifier[nodes] [ literal[int] ,:], identifier[color] = literal[string] , identifier[linestyle] = literal[string] )
identifier[ax] . identifier[axis] ( literal[string] )
identifier[ax] . identifier[set_xlim] (- literal[int] , literal[int] )
identifier[ax] . identifier[set_ylim] (- literal[int] , literal[int] )
identifier[save_image] ( identifier[ax] . identifier[figure] , literal[string] )
|
def surface_subdivide2(surface, sub_surface_b):
"""Image for :meth`.Surface.subdivide` docstring."""
if NO_IMAGES:
return # depends on [control=['if'], data=[]]
# Plot set-up.
figure = plt.figure()
ax = figure.gca()
colors = seaborn.husl_palette(6)
N = 128
s_vals = np.linspace(0.0, 1.0, N + 1)
# Add edges from surface.
add_edges(ax, surface, s_vals, colors[4])
# Now do the same for surface B.
add_edges(ax, sub_surface_b, s_vals, colors[0])
# Add the control points polygon for the original surface.
nodes = surface._nodes[:, (0, 2, 4, 5, 0)]
add_patch(ax, nodes, colors[2], with_nodes=False)
# Add the control points polygon for the sub-surface.
nodes = sub_surface_b._nodes[:, (0, 1, 2, 5, 3, 0)]
add_patch(ax, nodes, colors[1], with_nodes=False)
# Plot **all** the nodes.
sub_nodes = sub_surface_b._nodes
ax.plot(sub_nodes[0, :], sub_nodes[1, :], color='black', linestyle='None', marker='o')
# Take those same points and add the boundary.
ax.plot(nodes[0, :], nodes[1, :], color='black', linestyle='dashed')
ax.axis('scaled')
ax.set_xlim(-1.125, 2.125)
ax.set_ylim(-0.125, 4.125)
save_image(ax.figure, 'surface_subdivide2')
|
def _write_data_csv(csv_data):
"""
CSV data has been parsed by this point, so take it and write it file by file.
:return:
"""
logger_excel.info("enter write_data_csv")
# Loop for each file and data that is stored
for file in csv_data:
for filename, data in file.items():
# Make sure we're working with the right data types before trying to open and write a file
if isinstance(filename, str) and isinstance(data, list):
try:
with open(filename, 'w+') as f:
w = csv.writer(f)
for line in data:
w.writerow(line)
except Exception:
logger_excel.debug("write_data_csv: Unable to open/write file: {}".format(filename))
logger_excel.info("exit write_data_csv")
return
|
def function[_write_data_csv, parameter[csv_data]]:
constant[
CSV data has been parsed by this point, so take it and write it file by file.
:return:
]
call[name[logger_excel].info, parameter[constant[enter write_data_csv]]]
for taget[name[file]] in starred[name[csv_data]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18fe91c30>, <ast.Name object at 0x7da18fe90b50>]]] in starred[call[name[file].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18fe90eb0> begin[:]
<ast.Try object at 0x7da18fe93130>
call[name[logger_excel].info, parameter[constant[exit write_data_csv]]]
return[None]
|
keyword[def] identifier[_write_data_csv] ( identifier[csv_data] ):
literal[string]
identifier[logger_excel] . identifier[info] ( literal[string] )
keyword[for] identifier[file] keyword[in] identifier[csv_data] :
keyword[for] identifier[filename] , identifier[data] keyword[in] identifier[file] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[filename] , identifier[str] ) keyword[and] identifier[isinstance] ( identifier[data] , identifier[list] ):
keyword[try] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[w] = identifier[csv] . identifier[writer] ( identifier[f] )
keyword[for] identifier[line] keyword[in] identifier[data] :
identifier[w] . identifier[writerow] ( identifier[line] )
keyword[except] identifier[Exception] :
identifier[logger_excel] . identifier[debug] ( literal[string] . identifier[format] ( identifier[filename] ))
identifier[logger_excel] . identifier[info] ( literal[string] )
keyword[return]
|
def _write_data_csv(csv_data):
"""
CSV data has been parsed by this point, so take it and write it file by file.
:return:
"""
logger_excel.info('enter write_data_csv')
# Loop for each file and data that is stored
for file in csv_data:
for (filename, data) in file.items():
# Make sure we're working with the right data types before trying to open and write a file
if isinstance(filename, str) and isinstance(data, list):
try:
with open(filename, 'w+') as f:
w = csv.writer(f)
for line in data:
w.writerow(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except Exception:
logger_excel.debug('write_data_csv: Unable to open/write file: {}'.format(filename)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['file']]
logger_excel.info('exit write_data_csv')
return
|
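
A trimmed, runnable sketch of the same per-file csv.writer loop with an illustrative csv_data structure (adding newline='' avoids blank rows on Windows; the original omits it):

import csv

csv_data = [{'out1.csv': [['age', 'depth'], [100, 0.5]]}]   # illustrative
for file in csv_data:
    for filename, data in file.items():
        if isinstance(filename, str) and isinstance(data, list):
            with open(filename, 'w+', newline='') as f:
                csv.writer(f).writerows(data)   # same as row-by-row writerow
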
def create_model(self, role=None, image=None, predictor_cls=None, serializer=None, deserializer=None,
content_type=None, accept=None, vpc_config_override=vpc_utils.VPC_CONFIG_DEFAULT, **kwargs):
"""
Create a model to deploy.
Args:
role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``Model``, which is also used during
transform jobs. If not specified, the role from the Estimator will be used.
            image (str): A container image to use for deploying the model. Defaults to the image used for training.
predictor_cls (RealTimePredictor): The predictor class to use when deploying the model.
serializer (callable): Should accept a single argument, the input data, and return a sequence
of bytes. May provide a content_type attribute that defines the endpoint request content type
deserializer (callable): Should accept two arguments, the result data and the response content type,
                and return a sequence of bytes. May provide a content_type attribute that defines the endpoint
response Accept content type.
content_type (str): The invocation ContentType, overriding any content_type from the serializer
accept (str): The invocation Accept, overriding any accept from the deserializer.
vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on the model.
Default: use subnets and security groups from this Estimator.
* 'Subnets' (list[str]): List of subnet ids.
* 'SecurityGroupIds' (list[str]): List of security group ids.
The serializer, deserializer, content_type, and accept arguments are only used to define a default
RealTimePredictor. They are ignored if an explicit predictor class is passed in. Other arguments
are passed through to the Model class.
Returns: a Model ready for deployment.
"""
if predictor_cls is None:
def predict_wrapper(endpoint, session):
return RealTimePredictor(endpoint, session, serializer, deserializer, content_type, accept)
predictor_cls = predict_wrapper
role = role or self.role
return Model(self.model_data, image or self.train_image(), role,
vpc_config=self.get_vpc_config(vpc_config_override),
sagemaker_session=self.sagemaker_session, predictor_cls=predictor_cls, **kwargs)
|
def function[create_model, parameter[self, role, image, predictor_cls, serializer, deserializer, content_type, accept, vpc_config_override]]:
constant[
Create a model to deploy.
Args:
role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``Model``, which is also used during
transform jobs. If not specified, the role from the Estimator will be used.
        image (str): A container image to use for deploying the model. Defaults to the image used for training.
predictor_cls (RealTimePredictor): The predictor class to use when deploying the model.
serializer (callable): Should accept a single argument, the input data, and return a sequence
of bytes. May provide a content_type attribute that defines the endpoint request content type
deserializer (callable): Should accept two arguments, the result data and the response content type,
            and return a sequence of bytes. May provide a content_type attribute that defines the endpoint
response Accept content type.
content_type (str): The invocation ContentType, overriding any content_type from the serializer
accept (str): The invocation Accept, overriding any accept from the deserializer.
vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on the model.
Default: use subnets and security groups from this Estimator.
* 'Subnets' (list[str]): List of subnet ids.
* 'SecurityGroupIds' (list[str]): List of security group ids.
The serializer, deserializer, content_type, and accept arguments are only used to define a default
RealTimePredictor. They are ignored if an explicit predictor class is passed in. Other arguments
are passed through to the Model class.
Returns: a Model ready for deployment.
]
if compare[name[predictor_cls] is constant[None]] begin[:]
def function[predict_wrapper, parameter[endpoint, session]]:
return[call[name[RealTimePredictor], parameter[name[endpoint], name[session], name[serializer], name[deserializer], name[content_type], name[accept]]]]
variable[predictor_cls] assign[=] name[predict_wrapper]
variable[role] assign[=] <ast.BoolOp object at 0x7da1b1c36500>
return[call[name[Model], parameter[name[self].model_data, <ast.BoolOp object at 0x7da1b1c35a80>, name[role]]]]
|
keyword[def] identifier[create_model] ( identifier[self] , identifier[role] = keyword[None] , identifier[image] = keyword[None] , identifier[predictor_cls] = keyword[None] , identifier[serializer] = keyword[None] , identifier[deserializer] = keyword[None] ,
identifier[content_type] = keyword[None] , identifier[accept] = keyword[None] , identifier[vpc_config_override] = identifier[vpc_utils] . identifier[VPC_CONFIG_DEFAULT] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[predictor_cls] keyword[is] keyword[None] :
keyword[def] identifier[predict_wrapper] ( identifier[endpoint] , identifier[session] ):
keyword[return] identifier[RealTimePredictor] ( identifier[endpoint] , identifier[session] , identifier[serializer] , identifier[deserializer] , identifier[content_type] , identifier[accept] )
identifier[predictor_cls] = identifier[predict_wrapper]
identifier[role] = identifier[role] keyword[or] identifier[self] . identifier[role]
keyword[return] identifier[Model] ( identifier[self] . identifier[model_data] , identifier[image] keyword[or] identifier[self] . identifier[train_image] (), identifier[role] ,
identifier[vpc_config] = identifier[self] . identifier[get_vpc_config] ( identifier[vpc_config_override] ),
identifier[sagemaker_session] = identifier[self] . identifier[sagemaker_session] , identifier[predictor_cls] = identifier[predictor_cls] ,** identifier[kwargs] )
|
def create_model(self, role=None, image=None, predictor_cls=None, serializer=None, deserializer=None, content_type=None, accept=None, vpc_config_override=vpc_utils.VPC_CONFIG_DEFAULT, **kwargs):
"""
Create a model to deploy.
Args:
role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``Model``, which is also used during
transform jobs. If not specified, the role from the Estimator will be used.
        image (str): A container image to use for deploying the model. Defaults to the image used for training.
predictor_cls (RealTimePredictor): The predictor class to use when deploying the model.
serializer (callable): Should accept a single argument, the input data, and return a sequence
of bytes. May provide a content_type attribute that defines the endpoint request content type
deserializer (callable): Should accept two arguments, the result data and the response content type,
            and return a sequence of bytes. May provide a content_type attribute that defines the endpoint
response Accept content type.
content_type (str): The invocation ContentType, overriding any content_type from the serializer
accept (str): The invocation Accept, overriding any accept from the deserializer.
vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on the model.
Default: use subnets and security groups from this Estimator.
* 'Subnets' (list[str]): List of subnet ids.
* 'SecurityGroupIds' (list[str]): List of security group ids.
The serializer, deserializer, content_type, and accept arguments are only used to define a default
RealTimePredictor. They are ignored if an explicit predictor class is passed in. Other arguments
are passed through to the Model class.
Returns: a Model ready for deployment.
"""
if predictor_cls is None:
def predict_wrapper(endpoint, session):
return RealTimePredictor(endpoint, session, serializer, deserializer, content_type, accept)
predictor_cls = predict_wrapper # depends on [control=['if'], data=['predictor_cls']]
role = role or self.role
return Model(self.model_data, image or self.train_image(), role, vpc_config=self.get_vpc_config(vpc_config_override), sagemaker_session=self.sagemaker_session, predictor_cls=predictor_cls, **kwargs)
|
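
The predict_wrapper closure above is a generic factory pattern: capture the serializer settings now, construct the predictor later once endpoint and session exist. A framework-agnostic sketch of the same idea (names are illustrative, not the SageMaker API):

def make_predictor_factory(serializer, deserializer):
    def build(endpoint, session):
        # serializer/deserializer are captured from the enclosing scope
        return {'endpoint': endpoint, 'session': session,
                'serializer': serializer, 'deserializer': deserializer}
    return build

factory = make_predictor_factory(str.encode, bytes.decode)
print(factory('my-endpoint', object()))
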
def _create_header(self):
"""
Function to create the GroupHeader (GrpHdr) in the
CstmrCdtTrfInitn Node
"""
# Retrieve the node to which we will append the group header.
CstmrCdtTrfInitn_node = self._xml.find('CstmrCdtTrfInitn')
# Create the header nodes.
GrpHdr_node = ET.Element("GrpHdr")
MsgId_node = ET.Element("MsgId")
CreDtTm_node = ET.Element("CreDtTm")
NbOfTxs_node = ET.Element("NbOfTxs")
CtrlSum_node = ET.Element("CtrlSum")
InitgPty_node = ET.Element("InitgPty")
Nm_node = ET.Element("Nm")
# Add data to some header nodes.
MsgId_node.text = self.msg_id
CreDtTm_node.text = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
Nm_node.text = self._config['name']
# Append the nodes
InitgPty_node.append(Nm_node)
GrpHdr_node.append(MsgId_node)
GrpHdr_node.append(CreDtTm_node)
GrpHdr_node.append(NbOfTxs_node)
GrpHdr_node.append(CtrlSum_node)
GrpHdr_node.append(InitgPty_node)
# Append the header to its parent
CstmrCdtTrfInitn_node.append(GrpHdr_node)
|
def function[_create_header, parameter[self]]:
constant[
Function to create the GroupHeader (GrpHdr) in the
CstmrCdtTrfInitn Node
]
variable[CstmrCdtTrfInitn_node] assign[=] call[name[self]._xml.find, parameter[constant[CstmrCdtTrfInitn]]]
variable[GrpHdr_node] assign[=] call[name[ET].Element, parameter[constant[GrpHdr]]]
variable[MsgId_node] assign[=] call[name[ET].Element, parameter[constant[MsgId]]]
variable[CreDtTm_node] assign[=] call[name[ET].Element, parameter[constant[CreDtTm]]]
variable[NbOfTxs_node] assign[=] call[name[ET].Element, parameter[constant[NbOfTxs]]]
variable[CtrlSum_node] assign[=] call[name[ET].Element, parameter[constant[CtrlSum]]]
variable[InitgPty_node] assign[=] call[name[ET].Element, parameter[constant[InitgPty]]]
variable[Nm_node] assign[=] call[name[ET].Element, parameter[constant[Nm]]]
name[MsgId_node].text assign[=] name[self].msg_id
name[CreDtTm_node].text assign[=] call[call[name[datetime].datetime.now, parameter[]].strftime, parameter[constant[%Y-%m-%dT%H:%M:%S]]]
name[Nm_node].text assign[=] call[name[self]._config][constant[name]]
call[name[InitgPty_node].append, parameter[name[Nm_node]]]
call[name[GrpHdr_node].append, parameter[name[MsgId_node]]]
call[name[GrpHdr_node].append, parameter[name[CreDtTm_node]]]
call[name[GrpHdr_node].append, parameter[name[NbOfTxs_node]]]
call[name[GrpHdr_node].append, parameter[name[CtrlSum_node]]]
call[name[GrpHdr_node].append, parameter[name[InitgPty_node]]]
call[name[CstmrCdtTrfInitn_node].append, parameter[name[GrpHdr_node]]]
|
keyword[def] identifier[_create_header] ( identifier[self] ):
literal[string]
identifier[CstmrCdtTrfInitn_node] = identifier[self] . identifier[_xml] . identifier[find] ( literal[string] )
identifier[GrpHdr_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[MsgId_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[CreDtTm_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[NbOfTxs_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[CtrlSum_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[InitgPty_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[Nm_node] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[MsgId_node] . identifier[text] = identifier[self] . identifier[msg_id]
identifier[CreDtTm_node] . identifier[text] = identifier[datetime] . identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] )
identifier[Nm_node] . identifier[text] = identifier[self] . identifier[_config] [ literal[string] ]
identifier[InitgPty_node] . identifier[append] ( identifier[Nm_node] )
identifier[GrpHdr_node] . identifier[append] ( identifier[MsgId_node] )
identifier[GrpHdr_node] . identifier[append] ( identifier[CreDtTm_node] )
identifier[GrpHdr_node] . identifier[append] ( identifier[NbOfTxs_node] )
identifier[GrpHdr_node] . identifier[append] ( identifier[CtrlSum_node] )
identifier[GrpHdr_node] . identifier[append] ( identifier[InitgPty_node] )
identifier[CstmrCdtTrfInitn_node] . identifier[append] ( identifier[GrpHdr_node] )
|
def _create_header(self):
"""
Function to create the GroupHeader (GrpHdr) in the
CstmrCdtTrfInitn Node
"""
# Retrieve the node to which we will append the group header.
CstmrCdtTrfInitn_node = self._xml.find('CstmrCdtTrfInitn')
# Create the header nodes.
GrpHdr_node = ET.Element('GrpHdr')
MsgId_node = ET.Element('MsgId')
CreDtTm_node = ET.Element('CreDtTm')
NbOfTxs_node = ET.Element('NbOfTxs')
CtrlSum_node = ET.Element('CtrlSum')
InitgPty_node = ET.Element('InitgPty')
Nm_node = ET.Element('Nm')
# Add data to some header nodes.
MsgId_node.text = self.msg_id
CreDtTm_node.text = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
Nm_node.text = self._config['name']
# Append the nodes
InitgPty_node.append(Nm_node)
GrpHdr_node.append(MsgId_node)
GrpHdr_node.append(CreDtTm_node)
GrpHdr_node.append(NbOfTxs_node)
GrpHdr_node.append(CtrlSum_node)
GrpHdr_node.append(InitgPty_node)
# Append the header to its parent
CstmrCdtTrfInitn_node.append(GrpHdr_node)
|
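
ET.SubElement offers a more compact route to the same GrpHdr tree; a minimal standard-library sketch (the message id and party name are illustrative):

import datetime
import xml.etree.ElementTree as ET

grp_hdr = ET.Element('GrpHdr')
ET.SubElement(grp_hdr, 'MsgId').text = 'MSG-0001'     # illustrative id
ET.SubElement(grp_hdr, 'CreDtTm').text = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
initg_pty = ET.SubElement(grp_hdr, 'InitgPty')
ET.SubElement(initg_pty, 'Nm').text = 'Example Corp'  # illustrative name
print(ET.tostring(grp_hdr, encoding='unicode'))
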
def _filter_list(input_list, search_key, search_value):
'''
    Filters a list of dictionaries by a key-value pair.
    :param input_list: a list of dictionaries
    :param search_key: the key we are looking for
    :param search_value: the value expected under the key specified in search_key
    :return: filtered list of dictionaries
'''
output_list = list()
for dictionary in input_list:
if dictionary.get(search_key) == search_value:
output_list.append(dictionary)
return output_list
|
def function[_filter_list, parameter[input_list, search_key, search_value]]:
constant[
    Filters a list of dictionaries by a key-value pair.
    :param input_list: a list of dictionaries
    :param search_key: the key we are looking for
    :param search_value: the value expected under the key specified in search_key
    :return: filtered list of dictionaries
]
variable[output_list] assign[=] call[name[list], parameter[]]
for taget[name[dictionary]] in starred[name[input_list]] begin[:]
if compare[call[name[dictionary].get, parameter[name[search_key]]] equal[==] name[search_value]] begin[:]
call[name[output_list].append, parameter[name[dictionary]]]
return[name[output_list]]
|
keyword[def] identifier[_filter_list] ( identifier[input_list] , identifier[search_key] , identifier[search_value] ):
literal[string]
identifier[output_list] = identifier[list] ()
keyword[for] identifier[dictionary] keyword[in] identifier[input_list] :
keyword[if] identifier[dictionary] . identifier[get] ( identifier[search_key] )== identifier[search_value] :
identifier[output_list] . identifier[append] ( identifier[dictionary] )
keyword[return] identifier[output_list]
|
def _filter_list(input_list, search_key, search_value):
"""
    Filters a list of dictionaries by a key-value pair.
    :param input_list: a list of dictionaries
    :param search_key: the key we are looking for
    :param search_value: the value expected under the key specified in search_key
    :return: filtered list of dictionaries
"""
output_list = list()
for dictionary in input_list:
if dictionary.get(search_key) == search_value:
output_list.append(dictionary) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dictionary']]
return output_list
|
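
Usage, assuming _filter_list is defined as above; the loop is equivalent to the comprehension [d for d in input_list if d.get(key) == value]:

rows = [{'name': 'eth0', 'up': True}, {'name': 'eth1', 'up': False}]
print(_filter_list(rows, 'up', True))   # [{'name': 'eth0', 'up': True}]
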
def stop(ctx, yes):
"""Stop experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment stop
```
\b
```bash
$ polyaxon experiment -xp 2 stop
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if not yes and not click.confirm("Are sure you want to stop "
"experiment `{}`".format(_experiment)):
        click.echo('Exiting without stopping experiment.')
sys.exit(0)
try:
PolyaxonClient().experiment.stop(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not stop experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is being stopped.")
|
def function[stop, parameter[ctx, yes]]:
constant[Stop experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
```bash
$ polyaxon experiment stop
```
```bash
$ polyaxon experiment -xp 2 stop
```
]
<ast.Tuple object at 0x7da1affc0670> assign[=] call[name[get_project_experiment_or_local], parameter[call[name[ctx].obj.get, parameter[constant[project]]], call[name[ctx].obj.get, parameter[constant[experiment]]]]]
if <ast.BoolOp object at 0x7da1affc0ca0> begin[:]
        call[name[click].echo, parameter[constant[Exiting without stopping experiment.]]]
call[name[sys].exit, parameter[constant[0]]]
<ast.Try object at 0x7da1afe51930>
call[name[Printer].print_success, parameter[constant[Experiment is being stopped.]]]
|
keyword[def] identifier[stop] ( identifier[ctx] , identifier[yes] ):
literal[string]
identifier[user] , identifier[project_name] , identifier[_experiment] = identifier[get_project_experiment_or_local] ( identifier[ctx] . identifier[obj] . identifier[get] ( literal[string] ),
identifier[ctx] . identifier[obj] . identifier[get] ( literal[string] ))
keyword[if] keyword[not] identifier[yes] keyword[and] keyword[not] identifier[click] . identifier[confirm] ( literal[string]
literal[string] . identifier[format] ( identifier[_experiment] )):
identifier[click] . identifier[echo] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[try] :
identifier[PolyaxonClient] (). identifier[experiment] . identifier[stop] ( identifier[user] , identifier[project_name] , identifier[_experiment] )
keyword[except] ( identifier[PolyaxonHTTPError] , identifier[PolyaxonShouldExitError] , identifier[PolyaxonClientException] ) keyword[as] identifier[e] :
identifier[Printer] . identifier[print_error] ( literal[string] . identifier[format] ( identifier[_experiment] ))
identifier[Printer] . identifier[print_error] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[sys] . identifier[exit] ( literal[int] )
identifier[Printer] . identifier[print_success] ( literal[string] )
|
def stop(ctx, yes):
"""Stop experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\x08
```bash
$ polyaxon experiment stop
```
\x08
```bash
$ polyaxon experiment -xp 2 stop
```
"""
(user, project_name, _experiment) = get_project_experiment_or_local(ctx.obj.get('project'), ctx.obj.get('experiment'))
    if not yes and (not click.confirm('Are you sure you want to stop experiment `{}`'.format(_experiment))):
        click.echo('Exiting without stopping experiment.')
sys.exit(0) # depends on [control=['if'], data=[]]
try:
PolyaxonClient().experiment.stop(user, project_name, _experiment) # depends on [control=['try'], data=[]]
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not stop experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1) # depends on [control=['except'], data=['e']]
Printer.print_success('Experiment is being stopped.')
|
def _handle_raw_book(self, dtype, data, ts):
"""Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_raw_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.raw_books[channel_identifier].put(entry)
|
def function[_handle_raw_book, parameter[self, dtype, data, ts]]:
constant[Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
]
call[name[self].log.debug, parameter[constant[_handle_raw_book: %s - %s - %s], name[dtype], name[data], name[ts]]]
<ast.Tuple object at 0x7da18c4ce590> assign[=] name[data]
variable[channel_identifier] assign[=] call[name[self].channel_directory][name[channel_id]]
variable[entry] assign[=] tuple[[<ast.Name object at 0x7da18c4ccdf0>, <ast.Name object at 0x7da18c4cc7c0>]]
call[call[name[self].raw_books][name[channel_identifier]].put, parameter[name[entry]]]
|
keyword[def] identifier[_handle_raw_book] ( identifier[self] , identifier[dtype] , identifier[data] , identifier[ts] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[dtype] , identifier[data] , identifier[ts] )
identifier[channel_id] ,* identifier[data] = identifier[data]
identifier[channel_identifier] = identifier[self] . identifier[channel_directory] [ identifier[channel_id] ]
identifier[entry] =( identifier[data] , identifier[ts] )
identifier[self] . identifier[raw_books] [ identifier[channel_identifier] ]. identifier[put] ( identifier[entry] )
|
def _handle_raw_book(self, dtype, data, ts):
"""Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug('_handle_raw_book: %s - %s - %s', dtype, data, ts)
(channel_id, *data) = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.raw_books[channel_identifier].put(entry)
|
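
The channel_id, *data = data line is extended iterable unpacking (PEP 3132): the head element peels off and the remainder becomes a list. A minimal illustration with a made-up message:

msg = [42, 'trade', 100.5, 1234567890]
channel_id, *data = msg
print(channel_id)   # 42
print(data)         # ['trade', 100.5, 1234567890]
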
def _setup_piddir(self):
"""Create the directory for the PID file if necessary."""
if self.pidfile is None:
return
piddir = os.path.dirname(self.pidfile)
if not os.path.isdir(piddir):
# Create the directory with sensible mode and ownership
os.makedirs(piddir, 0o777 & ~self.umask)
os.chown(piddir, self.uid, self.gid)
|
def function[_setup_piddir, parameter[self]]:
constant[Create the directory for the PID file if necessary.]
if compare[name[self].pidfile is constant[None]] begin[:]
return[None]
variable[piddir] assign[=] call[name[os].path.dirname, parameter[name[self].pidfile]]
if <ast.UnaryOp object at 0x7da204621300> begin[:]
call[name[os].makedirs, parameter[name[piddir], binary_operation[constant[511] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da2046227d0>]]]
call[name[os].chown, parameter[name[piddir], name[self].uid, name[self].gid]]
|
keyword[def] identifier[_setup_piddir] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[pidfile] keyword[is] keyword[None] :
keyword[return]
identifier[piddir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[pidfile] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[piddir] ):
identifier[os] . identifier[makedirs] ( identifier[piddir] , literal[int] &~ identifier[self] . identifier[umask] )
identifier[os] . identifier[chown] ( identifier[piddir] , identifier[self] . identifier[uid] , identifier[self] . identifier[gid] )
|
def _setup_piddir(self):
"""Create the directory for the PID file if necessary."""
if self.pidfile is None:
return # depends on [control=['if'], data=[]]
piddir = os.path.dirname(self.pidfile)
if not os.path.isdir(piddir):
# Create the directory with sensible mode and ownership
os.makedirs(piddir, 511 & ~self.umask)
os.chown(piddir, self.uid, self.gid) # depends on [control=['if'], data=[]]
|
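
The mode passed to os.makedirs is 0o777 & ~umask, i.e. full permissions with the masked bits cleared. A quick check of that arithmetic:

umask = 0o022
print(oct(0o777 & ~umask))   # 0o755, i.e. rwxr-xr-x
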
def validate_gaslimit(self, header: BlockHeader) -> None:
"""
Validate the gas limit on the given header.
"""
parent_header = self.get_block_header_by_hash(header.parent_hash)
low_bound, high_bound = compute_gas_limit_bounds(parent_header)
if header.gas_limit < low_bound:
raise ValidationError(
"The gas limit on block {0} is too low: {1}. It must be at least {2}".format(
encode_hex(header.hash), header.gas_limit, low_bound))
elif header.gas_limit > high_bound:
raise ValidationError(
"The gas limit on block {0} is too high: {1}. It must be at most {2}".format(
encode_hex(header.hash), header.gas_limit, high_bound))
|
def function[validate_gaslimit, parameter[self, header]]:
constant[
Validate the gas limit on the given header.
]
variable[parent_header] assign[=] call[name[self].get_block_header_by_hash, parameter[name[header].parent_hash]]
<ast.Tuple object at 0x7da1b175d4e0> assign[=] call[name[compute_gas_limit_bounds], parameter[name[parent_header]]]
if compare[name[header].gas_limit less[<] name[low_bound]] begin[:]
<ast.Raise object at 0x7da1b175fb20>
|
keyword[def] identifier[validate_gaslimit] ( identifier[self] , identifier[header] : identifier[BlockHeader] )-> keyword[None] :
literal[string]
identifier[parent_header] = identifier[self] . identifier[get_block_header_by_hash] ( identifier[header] . identifier[parent_hash] )
identifier[low_bound] , identifier[high_bound] = identifier[compute_gas_limit_bounds] ( identifier[parent_header] )
keyword[if] identifier[header] . identifier[gas_limit] < identifier[low_bound] :
keyword[raise] identifier[ValidationError] (
literal[string] . identifier[format] (
identifier[encode_hex] ( identifier[header] . identifier[hash] ), identifier[header] . identifier[gas_limit] , identifier[low_bound] ))
keyword[elif] identifier[header] . identifier[gas_limit] > identifier[high_bound] :
keyword[raise] identifier[ValidationError] (
literal[string] . identifier[format] (
identifier[encode_hex] ( identifier[header] . identifier[hash] ), identifier[header] . identifier[gas_limit] , identifier[high_bound] ))
|
def validate_gaslimit(self, header: BlockHeader) -> None:
"""
Validate the gas limit on the given header.
"""
parent_header = self.get_block_header_by_hash(header.parent_hash)
(low_bound, high_bound) = compute_gas_limit_bounds(parent_header)
if header.gas_limit < low_bound:
raise ValidationError('The gas limit on block {0} is too low: {1}. It must be at least {2}'.format(encode_hex(header.hash), header.gas_limit, low_bound)) # depends on [control=['if'], data=['low_bound']]
elif header.gas_limit > high_bound:
raise ValidationError('The gas limit on block {0} is too high: {1}. It must be at most {2}'.format(encode_hex(header.hash), header.gas_limit, high_bound)) # depends on [control=['if'], data=['high_bound']]
|
def find(self, func: Callable[[K, T], bool]) -> TOption[T]:
"""
Usage:
>>> TDict(k1=1, k2=2, k3=3).find(lambda k, v: v == 2)
Option --> 2
>>> TDict(k1=1, k2=2, k3=3).find(lambda k, v: v == 4)
Option --> None
"""
for k, v in self.items():
if func(k, v):
return TOption(v)
return TOption(None)
|
def function[find, parameter[self, func]]:
constant[
Usage:
>>> TDict(k1=1, k2=2, k3=3).find(lambda k, v: v == 2)
Option --> 2
>>> TDict(k1=1, k2=2, k3=3).find(lambda k, v: v == 4)
Option --> None
]
for taget[tuple[[<ast.Name object at 0x7da20e955390>, <ast.Name object at 0x7da20e954610>]]] in starred[call[name[self].items, parameter[]]] begin[:]
if call[name[func], parameter[name[k], name[v]]] begin[:]
return[call[name[TOption], parameter[name[v]]]]
return[call[name[TOption], parameter[constant[None]]]]
|
keyword[def] identifier[find] ( identifier[self] , identifier[func] : identifier[Callable] [[ identifier[K] , identifier[T] ], identifier[bool] ])-> identifier[TOption] [ identifier[T] ]:
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[items] ():
keyword[if] identifier[func] ( identifier[k] , identifier[v] ):
keyword[return] identifier[TOption] ( identifier[v] )
keyword[return] identifier[TOption] ( keyword[None] )
|
def find(self, func: Callable[[K, T], bool]) -> TOption[T]:
"""
Usage:
>>> TDict(k1=1, k2=2, k3=3).find(lambda k, v: v == 2)
Option --> 2
>>> TDict(k1=1, k2=2, k3=3).find(lambda k, v: v == 4)
Option --> None
"""
for (k, v) in self.items():
if func(k, v):
return TOption(v) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return TOption(None)
|
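
A plain-dict analogue of TDict.find, returning Optional[T] instead of the TOption wrapper (a sketch, since TDict/TOption come from the surrounding library):

from typing import Callable, Dict, Optional, TypeVar

K = TypeVar('K')
T = TypeVar('T')

def find_value(d: Dict[K, T], func: Callable[[K, T], bool]) -> Optional[T]:
    for k, v in d.items():
        if func(k, v):
            return v          # first matching value wins
    return None

print(find_value({'k1': 1, 'k2': 2, 'k3': 3}, lambda k, v: v == 2))   # 2
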
def _print(stats, limit, label):
"""
    Control the amount of output
"""
print("TraceMalloc for {}".format(label))
for index, stat in enumerate(stats):
if index < limit:
print(stat)
else:
break
|
def function[_print, parameter[stats, limit, label]]:
constant[
    Control the amount of output
]
call[name[print], parameter[call[constant[TraceMalloc for {}].format, parameter[name[label]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0049bd0>, <ast.Name object at 0x7da1b00497e0>]]] in starred[call[name[enumerate], parameter[name[stats]]]] begin[:]
if compare[name[index] less[<] name[limit]] begin[:]
call[name[print], parameter[name[stat]]]
|
keyword[def] identifier[_print] ( identifier[stats] , identifier[limit] , identifier[label] ):
literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[label] ))
keyword[for] identifier[index] , identifier[stat] keyword[in] identifier[enumerate] ( identifier[stats] ):
keyword[if] identifier[index] < identifier[limit] :
identifier[print] ( identifier[stat] )
keyword[else] :
keyword[break]
|
def _print(stats, limit, label):
"""
    Control the amount of output
"""
print('TraceMalloc for {}'.format(label))
for (index, stat) in enumerate(stats):
if index < limit:
print(stat) # depends on [control=['if'], data=[]]
else:
break # depends on [control=['for'], data=[]]
|
def pick_free_port(hostname=REDIRECT_HOST, port=0):
""" Try to bind a port. Default=0 selects a free port. """
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((hostname, port)) # port=0 finds an open port
except OSError as e:
log.warning("Could not bind to %s:%s %s", hostname, port, e)
if port == 0:
print('Unable to find an open port for authentication.')
raise AuthenticationException(e)
else:
return pick_free_port(hostname, 0)
addr, port = s.getsockname()
s.close()
return port
|
def function[pick_free_port, parameter[hostname, port]]:
constant[ Try to bind a port. Default=0 selects a free port. ]
import module[socket]
variable[s] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
<ast.Try object at 0x7da1b0781ff0>
<ast.Tuple object at 0x7da1b07805b0> assign[=] call[name[s].getsockname, parameter[]]
call[name[s].close, parameter[]]
return[name[port]]
|
keyword[def] identifier[pick_free_port] ( identifier[hostname] = identifier[REDIRECT_HOST] , identifier[port] = literal[int] ):
literal[string]
keyword[import] identifier[socket]
identifier[s] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
keyword[try] :
identifier[s] . identifier[bind] (( identifier[hostname] , identifier[port] ))
keyword[except] identifier[OSError] keyword[as] identifier[e] :
identifier[log] . identifier[warning] ( literal[string] , identifier[hostname] , identifier[port] , identifier[e] )
keyword[if] identifier[port] == literal[int] :
identifier[print] ( literal[string] )
keyword[raise] identifier[AuthenticationException] ( identifier[e] )
keyword[else] :
keyword[return] identifier[pick_free_port] ( identifier[hostname] , literal[int] )
identifier[addr] , identifier[port] = identifier[s] . identifier[getsockname] ()
identifier[s] . identifier[close] ()
keyword[return] identifier[port]
|
def pick_free_port(hostname=REDIRECT_HOST, port=0):
""" Try to bind a port. Default=0 selects a free port. """
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((hostname, port)) # port=0 finds an open port # depends on [control=['try'], data=[]]
except OSError as e:
log.warning('Could not bind to %s:%s %s', hostname, port, e)
if port == 0:
print('Unable to find an open port for authentication.')
raise AuthenticationException(e) # depends on [control=['if'], data=[]]
else:
return pick_free_port(hostname, 0) # depends on [control=['except'], data=['e']]
(addr, port) = s.getsockname()
s.close()
return port
|
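
Binding to port 0 asks the OS for any free ephemeral port, and getsockname() reveals which one was granted; the core of the function in isolation:

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('127.0.0.1', 0))        # 0 means: pick any free port
addr, port = s.getsockname()    # the port the OS actually assigned
s.close()
print(port)                     # e.g. 54321
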
def lbmethods(self):
'''
List all the load balancer methods
'''
methods = self.bigIP.LocalLB.Pool.typefactory.create(
'LocalLB.LBMethod'
)
return [method[0].split('_', 2)[-1] for method in methods]
|
def function[lbmethods, parameter[self]]:
constant[
List all the load balancer methods
]
variable[methods] assign[=] call[name[self].bigIP.LocalLB.Pool.typefactory.create, parameter[constant[LocalLB.LBMethod]]]
return[<ast.ListComp object at 0x7da1b1c22b00>]
|
keyword[def] identifier[lbmethods] ( identifier[self] ):
literal[string]
identifier[methods] = identifier[self] . identifier[bigIP] . identifier[LocalLB] . identifier[Pool] . identifier[typefactory] . identifier[create] (
literal[string]
)
keyword[return] [ identifier[method] [ literal[int] ]. identifier[split] ( literal[string] , literal[int] )[- literal[int] ] keyword[for] identifier[method] keyword[in] identifier[methods] ]
|
def lbmethods(self):
"""
List all the load balancer methods
"""
methods = self.bigIP.LocalLB.Pool.typefactory.create('LocalLB.LBMethod')
return [method[0].split('_', 2)[-1] for method in methods]
|
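
The split('_', 2)[-1] idiom strips a two-part prefix such as LB_METHOD_ while preserving underscores inside the method name itself (the enum value below is illustrative):

method = ('LB_METHOD_ROUND_ROBIN',)
print(method[0].split('_', 2)[-1])   # ROUND_ROBIN
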
def _transform_field(field):
"""transform field for displaying"""
if isinstance(field, bool):
return TRUE if field else FALSE
elif isinstance(field, (list, dict)):
return json.dumps(field, sort_keys=True, ensure_ascii=False)
else:
return field
|
def function[_transform_field, parameter[field]]:
constant[transform field for displaying]
if call[name[isinstance], parameter[name[field], name[bool]]] begin[:]
return[<ast.IfExp object at 0x7da204621930>]
|
keyword[def] identifier[_transform_field] ( identifier[field] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[field] , identifier[bool] ):
keyword[return] identifier[TRUE] keyword[if] identifier[field] keyword[else] identifier[FALSE]
keyword[elif] identifier[isinstance] ( identifier[field] ,( identifier[list] , identifier[dict] )):
keyword[return] identifier[json] . identifier[dumps] ( identifier[field] , identifier[sort_keys] = keyword[True] , identifier[ensure_ascii] = keyword[False] )
keyword[else] :
keyword[return] identifier[field]
|
def _transform_field(field):
"""transform field for displaying"""
if isinstance(field, bool):
return TRUE if field else FALSE # depends on [control=['if'], data=[]]
elif isinstance(field, (list, dict)):
return json.dumps(field, sort_keys=True, ensure_ascii=False) # depends on [control=['if'], data=[]]
else:
return field
|
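
Behavior sketch, assuming _transform_field and the module-level TRUE/FALSE display constants (not shown here) are in scope:

print(_transform_field({'b': 1, 'a': 2}))   # '{"a": 2, "b": 1}' — keys sorted, unicode kept
print(_transform_field('plain'))            # returned unchanged
print(_transform_field(True))               # whatever TRUE is defined as
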
def show_instance(name, call=None):
'''
Show the details from the provider concerning an instance
'''
if call != 'action':
raise SaltCloudSystemExit(
'The show_instance action must be called with -a or --action.'
)
nodes = list_nodes_full()
# Find under which cloud service the name is listed, if any
if name not in nodes:
return {}
__utils__['cloud.cache_node'](nodes[name], __active_provider_name__, __opts__)
return nodes[name]
|
def function[show_instance, parameter[name, call]]:
constant[
Show the details from the provider concerning an instance
]
if compare[name[call] not_equal[!=] constant[action]] begin[:]
<ast.Raise object at 0x7da1b2089d50>
variable[nodes] assign[=] call[name[list_nodes_full], parameter[]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[nodes]] begin[:]
return[dictionary[[], []]]
call[call[name[__utils__]][constant[cloud.cache_node]], parameter[call[name[nodes]][name[name]], name[__active_provider_name__], name[__opts__]]]
return[call[name[nodes]][name[name]]]
|
keyword[def] identifier[show_instance] ( identifier[name] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
identifier[nodes] = identifier[list_nodes_full] ()
keyword[if] identifier[name] keyword[not] keyword[in] identifier[nodes] :
keyword[return] {}
identifier[__utils__] [ literal[string] ]( identifier[nodes] [ identifier[name] ], identifier[__active_provider_name__] , identifier[__opts__] )
keyword[return] identifier[nodes] [ identifier[name] ]
|
def show_instance(name, call=None):
"""
Show the details from the provider concerning an instance
"""
if call != 'action':
raise SaltCloudSystemExit('The show_instance action must be called with -a or --action.') # depends on [control=['if'], data=[]]
nodes = list_nodes_full()
# Find under which cloud service the name is listed, if any
if name not in nodes:
return {} # depends on [control=['if'], data=[]]
__utils__['cloud.cache_node'](nodes[name], __active_provider_name__, __opts__)
return nodes[name]
|
def execution_cls(self):
"""Get execution layer class
"""
name = self.campaign.process.type
for clazz in [ExecutionDriver, SrunExecutionDriver]:
if name == clazz.name:
return clazz
raise NameError("Unknown execution layer: '%s'" % name)
|
def function[execution_cls, parameter[self]]:
constant[Get execution layer class
]
variable[name] assign[=] name[self].campaign.process.type
for taget[name[clazz]] in starred[list[[<ast.Name object at 0x7da20c7cb5e0>, <ast.Name object at 0x7da20c7cb2e0>]]] begin[:]
if compare[name[name] equal[==] name[clazz].name] begin[:]
return[name[clazz]]
<ast.Raise object at 0x7da20c7ca4d0>
|
keyword[def] identifier[execution_cls] ( identifier[self] ):
literal[string]
identifier[name] = identifier[self] . identifier[campaign] . identifier[process] . identifier[type]
keyword[for] identifier[clazz] keyword[in] [ identifier[ExecutionDriver] , identifier[SrunExecutionDriver] ]:
keyword[if] identifier[name] == identifier[clazz] . identifier[name] :
keyword[return] identifier[clazz]
keyword[raise] identifier[NameError] ( literal[string] % identifier[name] )
|
def execution_cls(self):
"""Get execution layer class
"""
name = self.campaign.process.type
for clazz in [ExecutionDriver, SrunExecutionDriver]:
if name == clazz.name:
return clazz # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['clazz']]
raise NameError("Unknown execution layer: '%s'" % name)
|
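
Scanning a class list for a matching name attribute is a small registry lookup; a dict-keyed variant of the same idea (the name values below are assumptions, not taken from the source):

class ExecutionDriver:
    name = 'local'       # assumed value

class SrunExecutionDriver:
    name = 'srun'        # assumed value

_DRIVERS = {cls.name: cls for cls in (ExecutionDriver, SrunExecutionDriver)}

def lookup_execution_cls(name):
    try:
        return _DRIVERS[name]
    except KeyError:
        raise NameError("Unknown execution layer: '%s'" % name)

print(lookup_execution_cls('srun').__name__)   # SrunExecutionDriver
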
def cublasDgemv(handle, trans, m, n, alpha, A, lda, x, incx, beta, y, incy):
"""
Matrix-vector product for real general matrix.
"""
status = _libcublas.cublasDgemv_v2(handle,
_CUBLAS_OP[trans], m, n,
ctypes.byref(ctypes.c_double(alpha)),
int(A), lda, int(x), incx,
ctypes.byref(ctypes.c_double(beta)),
int(y), incy)
cublasCheckStatus(status)
|
def function[cublasDgemv, parameter[handle, trans, m, n, alpha, A, lda, x, incx, beta, y, incy]]:
constant[
Matrix-vector product for real general matrix.
]
variable[status] assign[=] call[name[_libcublas].cublasDgemv_v2, parameter[name[handle], call[name[_CUBLAS_OP]][name[trans]], name[m], name[n], call[name[ctypes].byref, parameter[call[name[ctypes].c_double, parameter[name[alpha]]]]], call[name[int], parameter[name[A]]], name[lda], call[name[int], parameter[name[x]]], name[incx], call[name[ctypes].byref, parameter[call[name[ctypes].c_double, parameter[name[beta]]]]], call[name[int], parameter[name[y]]], name[incy]]]
call[name[cublasCheckStatus], parameter[name[status]]]
|
keyword[def] identifier[cublasDgemv] ( identifier[handle] , identifier[trans] , identifier[m] , identifier[n] , identifier[alpha] , identifier[A] , identifier[lda] , identifier[x] , identifier[incx] , identifier[beta] , identifier[y] , identifier[incy] ):
literal[string]
identifier[status] = identifier[_libcublas] . identifier[cublasDgemv_v2] ( identifier[handle] ,
identifier[_CUBLAS_OP] [ identifier[trans] ], identifier[m] , identifier[n] ,
identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_double] ( identifier[alpha] )),
identifier[int] ( identifier[A] ), identifier[lda] , identifier[int] ( identifier[x] ), identifier[incx] ,
identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_double] ( identifier[beta] )),
identifier[int] ( identifier[y] ), identifier[incy] )
identifier[cublasCheckStatus] ( identifier[status] )
|
def cublasDgemv(handle, trans, m, n, alpha, A, lda, x, incx, beta, y, incy):
"""
Matrix-vector product for real general matrix.
"""
status = _libcublas.cublasDgemv_v2(handle, _CUBLAS_OP[trans], m, n, ctypes.byref(ctypes.c_double(alpha)), int(A), lda, int(x), incx, ctypes.byref(ctypes.c_double(beta)), int(y), incy)
cublasCheckStatus(status)
|
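
The wrapper follows the usual ctypes calling convention: scalars are wrapped and passed by reference with ctypes.byref(ctypes.c_double(...)), device pointers travel as plain integers, and the status code is checked afterwards. The same byref pattern against libm's modf, which returns a fraction and writes the integral part through an out-pointer (library lookup is platform-dependent; no GPU required):

import ctypes
import ctypes.util

libm = ctypes.CDLL(ctypes.util.find_library('m'))   # location varies by platform
libm.modf.restype = ctypes.c_double
ipart = ctypes.c_double()
frac = libm.modf(3.75, ctypes.byref(ipart))         # out-parameter via byref
print(frac, ipart.value)                            # 0.75 3.0
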