code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_abi_tag():
"""Return the ABI tag based on SOABI (if available) or emulate SOABI
(CPython 2, PyPy)."""
soabi = get_config_var('SOABI')
impl = get_abbr_impl()
if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
d = ''
m = ''
u = ''
if get_flag('Py_DEBUG',
lambda: hasattr(sys, 'gettotalrefcount'),
warn=(impl == 'cp')):
d = 'd'
if get_flag('WITH_PYMALLOC',
lambda: impl == 'cp',
warn=(impl == 'cp')):
m = 'm'
# NB: Modified from ~ `from .extern import six; six.PY2`
PY2 = sys.version_info[0] == 2
if (get_flag('Py_UNICODE_SIZE',
lambda: sys.maxunicode == 0x10ffff,
expected=4,
warn=(impl == 'cp' and PY2)) and PY2):
u = 'u'
abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
elif soabi and soabi.startswith('cpython-'):
abi = 'cp' + soabi.split('-')[1]
elif soabi:
abi = soabi.replace('.', '_').replace('-', '_')
else:
abi = None
return abi | def function[get_abi_tag, parameter[]]:
constant[Return the ABI tag based on SOABI (if available) or emulate SOABI
(CPython 2, PyPy).]
variable[soabi] assign[=] call[name[get_config_var], parameter[constant[SOABI]]]
variable[impl] assign[=] call[name[get_abbr_impl], parameter[]]
if <ast.BoolOp object at 0x7da207f028f0> begin[:]
variable[d] assign[=] constant[]
variable[m] assign[=] constant[]
variable[u] assign[=] constant[]
if call[name[get_flag], parameter[constant[Py_DEBUG], <ast.Lambda object at 0x7da207f01990>]] begin[:]
variable[d] assign[=] constant[d]
if call[name[get_flag], parameter[constant[WITH_PYMALLOC], <ast.Lambda object at 0x7da207f017b0>]] begin[:]
variable[m] assign[=] constant[m]
variable[PY2] assign[=] compare[call[name[sys].version_info][constant[0]] equal[==] constant[2]]
if <ast.BoolOp object at 0x7da204620fd0> begin[:]
variable[u] assign[=] constant[u]
variable[abi] assign[=] binary_operation[constant[%s%s%s%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2046228c0>, <ast.Call object at 0x7da204620b50>, <ast.Name object at 0x7da204621060>, <ast.Name object at 0x7da2046205b0>, <ast.Name object at 0x7da2046227d0>]]]
return[name[abi]] | keyword[def] identifier[get_abi_tag] ():
literal[string]
identifier[soabi] = identifier[get_config_var] ( literal[string] )
identifier[impl] = identifier[get_abbr_impl] ()
keyword[if] keyword[not] identifier[soabi] keyword[and] identifier[impl] keyword[in] { literal[string] , literal[string] } keyword[and] identifier[hasattr] ( identifier[sys] , literal[string] ):
identifier[d] = literal[string]
identifier[m] = literal[string]
identifier[u] = literal[string]
keyword[if] identifier[get_flag] ( literal[string] ,
keyword[lambda] : identifier[hasattr] ( identifier[sys] , literal[string] ),
identifier[warn] =( identifier[impl] == literal[string] )):
identifier[d] = literal[string]
keyword[if] identifier[get_flag] ( literal[string] ,
keyword[lambda] : identifier[impl] == literal[string] ,
identifier[warn] =( identifier[impl] == literal[string] )):
identifier[m] = literal[string]
identifier[PY2] = identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int]
keyword[if] ( identifier[get_flag] ( literal[string] ,
keyword[lambda] : identifier[sys] . identifier[maxunicode] == literal[int] ,
identifier[expected] = literal[int] ,
identifier[warn] =( identifier[impl] == literal[string] keyword[and] identifier[PY2] )) keyword[and] identifier[PY2] ):
identifier[u] = literal[string]
identifier[abi] = literal[string] %( identifier[impl] , identifier[get_impl_ver] (), identifier[d] , identifier[m] , identifier[u] )
keyword[elif] identifier[soabi] keyword[and] identifier[soabi] . identifier[startswith] ( literal[string] ):
identifier[abi] = literal[string] + identifier[soabi] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[elif] identifier[soabi] :
identifier[abi] = identifier[soabi] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[else] :
identifier[abi] = keyword[None]
keyword[return] identifier[abi] | def get_abi_tag():
"""Return the ABI tag based on SOABI (if available) or emulate SOABI
(CPython 2, PyPy)."""
soabi = get_config_var('SOABI')
impl = get_abbr_impl()
if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
d = ''
m = ''
u = ''
if get_flag('Py_DEBUG', lambda : hasattr(sys, 'gettotalrefcount'), warn=impl == 'cp'):
d = 'd' # depends on [control=['if'], data=[]]
if get_flag('WITH_PYMALLOC', lambda : impl == 'cp', warn=impl == 'cp'):
m = 'm' # depends on [control=['if'], data=[]]
# NB: Modified from ~ `from .extern import six; six.PY2`
PY2 = sys.version_info[0] == 2
if get_flag('Py_UNICODE_SIZE', lambda : sys.maxunicode == 1114111, expected=4, warn=impl == 'cp' and PY2) and PY2:
u = 'u' # depends on [control=['if'], data=[]]
abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) # depends on [control=['if'], data=[]]
elif soabi and soabi.startswith('cpython-'):
abi = 'cp' + soabi.split('-')[1] # depends on [control=['if'], data=[]]
elif soabi:
abi = soabi.replace('.', '_').replace('-', '_') # depends on [control=['if'], data=[]]
else:
abi = None
return abi |
def decorate_all_methods(decorator):
"""
Build and return a decorator that will decorate all class members.
This will apply the passed decorator to all of the methods in the decorated
class, except the __init__ method, when a class is decorated with it.
"""
def decorate_class(cls):
for name, m in inspect.getmembers(cls, inspect.ismethod):
if name != "__init__":
setattr(cls, name, decorator(m))
return cls
return decorate_class | def function[decorate_all_methods, parameter[decorator]]:
constant[
Build and return a decorator that will decorate all class members.
This will apply the passed decorator to all of the methods in the decorated
class, except the __init__ method, when a class is decorated with it.
]
def function[decorate_class, parameter[cls]]:
for taget[tuple[[<ast.Name object at 0x7da18f00da80>, <ast.Name object at 0x7da18f00ca90>]]] in starred[call[name[inspect].getmembers, parameter[name[cls], name[inspect].ismethod]]] begin[:]
if compare[name[name] not_equal[!=] constant[__init__]] begin[:]
call[name[setattr], parameter[name[cls], name[name], call[name[decorator], parameter[name[m]]]]]
return[name[cls]]
return[name[decorate_class]] | keyword[def] identifier[decorate_all_methods] ( identifier[decorator] ):
literal[string]
keyword[def] identifier[decorate_class] ( identifier[cls] ):
keyword[for] identifier[name] , identifier[m] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[cls] , identifier[inspect] . identifier[ismethod] ):
keyword[if] identifier[name] != literal[string] :
identifier[setattr] ( identifier[cls] , identifier[name] , identifier[decorator] ( identifier[m] ))
keyword[return] identifier[cls]
keyword[return] identifier[decorate_class] | def decorate_all_methods(decorator):
"""
Build and return a decorator that will decorate all class members.
This will apply the passed decorator to all of the methods in the decorated
class, except the __init__ method, when a class is decorated with it.
"""
def decorate_class(cls):
for (name, m) in inspect.getmembers(cls, inspect.ismethod):
if name != '__init__':
setattr(cls, name, decorator(m)) # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=[]]
return cls
return decorate_class |
def workflow_aggregate(graph: BELGraph,
node: BaseEntity,
key: Optional[str] = None,
tag: Optional[str] = None,
default_score: Optional[float] = None,
runs: Optional[int] = None,
aggregator: Optional[Callable[[Iterable[float]], float]] = None,
) -> Optional[float]:
"""Get the average score over multiple runs.
This function is very simple, and can be copied to do more interesting statistics over the :class:`Runner`
instances. To iterate over the runners themselves, see :func:`workflow`
:param graph: A BEL graph
:param node: The BEL node that is the focus of this analysis
:param key: The key in the node data dictionary representing the experimental data. Defaults to
:data:`pybel_tools.constants.WEIGHT`.
:param tag: The key for the nodes' data dictionaries where the scores will be put. Defaults to 'score'
:param default_score: The initial score for all nodes. This number can go up or down.
:param runs: The number of times to run the heat diffusion workflow. Defaults to 100.
:param aggregator: A function that aggregates a list of scores. Defaults to :func:`numpy.average`.
Could also use: :func:`numpy.mean`, :func:`numpy.median`, :func:`numpy.min`, :func:`numpy.max`
:return: The average score for the target node
"""
runners = workflow(graph, node, key=key, tag=tag, default_score=default_score, runs=runs)
scores = [runner.get_final_score() for runner in runners]
if not scores:
log.warning('Unable to run the heat diffusion workflow for %s', node)
return
if aggregator is None:
return np.average(scores)
return aggregator(scores) | def function[workflow_aggregate, parameter[graph, node, key, tag, default_score, runs, aggregator]]:
constant[Get the average score over multiple runs.
This function is very simple, and can be copied to do more interesting statistics over the :class:`Runner`
instances. To iterate over the runners themselves, see :func:`workflow`
:param graph: A BEL graph
:param node: The BEL node that is the focus of this analysis
:param key: The key in the node data dictionary representing the experimental data. Defaults to
:data:`pybel_tools.constants.WEIGHT`.
:param tag: The key for the nodes' data dictionaries where the scores will be put. Defaults to 'score'
:param default_score: The initial score for all nodes. This number can go up or down.
:param runs: The number of times to run the heat diffusion workflow. Defaults to 100.
:param aggregator: A function that aggregates a list of scores. Defaults to :func:`numpy.average`.
Could also use: :func:`numpy.mean`, :func:`numpy.median`, :func:`numpy.min`, :func:`numpy.max`
:return: The average score for the target node
]
variable[runners] assign[=] call[name[workflow], parameter[name[graph], name[node]]]
variable[scores] assign[=] <ast.ListComp object at 0x7da20c7c8190>
if <ast.UnaryOp object at 0x7da1b00f7dc0> begin[:]
call[name[log].warning, parameter[constant[Unable to run the heat diffusion workflow for %s], name[node]]]
return[None]
if compare[name[aggregator] is constant[None]] begin[:]
return[call[name[np].average, parameter[name[scores]]]]
return[call[name[aggregator], parameter[name[scores]]]] | keyword[def] identifier[workflow_aggregate] ( identifier[graph] : identifier[BELGraph] ,
identifier[node] : identifier[BaseEntity] ,
identifier[key] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[tag] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[default_score] : identifier[Optional] [ identifier[float] ]= keyword[None] ,
identifier[runs] : identifier[Optional] [ identifier[int] ]= keyword[None] ,
identifier[aggregator] : identifier[Optional] [ identifier[Callable] [[ identifier[Iterable] [ identifier[float] ]], identifier[float] ]]= keyword[None] ,
)-> identifier[Optional] [ identifier[float] ]:
literal[string]
identifier[runners] = identifier[workflow] ( identifier[graph] , identifier[node] , identifier[key] = identifier[key] , identifier[tag] = identifier[tag] , identifier[default_score] = identifier[default_score] , identifier[runs] = identifier[runs] )
identifier[scores] =[ identifier[runner] . identifier[get_final_score] () keyword[for] identifier[runner] keyword[in] identifier[runners] ]
keyword[if] keyword[not] identifier[scores] :
identifier[log] . identifier[warning] ( literal[string] , identifier[node] )
keyword[return]
keyword[if] identifier[aggregator] keyword[is] keyword[None] :
keyword[return] identifier[np] . identifier[average] ( identifier[scores] )
keyword[return] identifier[aggregator] ( identifier[scores] ) | def workflow_aggregate(graph: BELGraph, node: BaseEntity, key: Optional[str]=None, tag: Optional[str]=None, default_score: Optional[float]=None, runs: Optional[int]=None, aggregator: Optional[Callable[[Iterable[float]], float]]=None) -> Optional[float]:
"""Get the average score over multiple runs.
This function is very simple, and can be copied to do more interesting statistics over the :class:`Runner`
instances. To iterate over the runners themselves, see :func:`workflow`
:param graph: A BEL graph
:param node: The BEL node that is the focus of this analysis
:param key: The key in the node data dictionary representing the experimental data. Defaults to
:data:`pybel_tools.constants.WEIGHT`.
:param tag: The key for the nodes' data dictionaries where the scores will be put. Defaults to 'score'
:param default_score: The initial score for all nodes. This number can go up or down.
:param runs: The number of times to run the heat diffusion workflow. Defaults to 100.
:param aggregator: A function that aggregates a list of scores. Defaults to :func:`numpy.average`.
Could also use: :func:`numpy.mean`, :func:`numpy.median`, :func:`numpy.min`, :func:`numpy.max`
:return: The average score for the target node
"""
runners = workflow(graph, node, key=key, tag=tag, default_score=default_score, runs=runs)
scores = [runner.get_final_score() for runner in runners]
if not scores:
log.warning('Unable to run the heat diffusion workflow for %s', node)
return # depends on [control=['if'], data=[]]
if aggregator is None:
return np.average(scores) # depends on [control=['if'], data=[]]
return aggregator(scores) |
def com_google_fonts_check_italic_angle(ttFont, style):
"""Checking post.italicAngle value."""
failed = False
value = ttFont["post"].italicAngle
# Checking that italicAngle <= 0
if value > 0:
failed = True
yield FAIL, Message("positive",
("The value of post.italicAngle is positive, which"
" is likely a mistake and should become negative,"
" from {} to {}.").format(value, -value))
# Checking that italicAngle is less than 20° (not good) or 30° (bad)
# Also note we invert the value to check it in a clear way
if abs(value) > 30:
failed = True
yield FAIL, Message("over -30 degrees",
("The value of post.italicAngle ({}) is very"
" high (over -30°!) and should be"
" confirmed.").format(value))
elif abs(value) > 20:
failed = True
yield WARN, Message("over -20 degrees",
("The value of post.italicAngle ({}) seems very"
" high (over -20°!) and should be"
" confirmed.").format(value))
# Checking if italicAngle matches font style:
if "Italic" in style:
if ttFont['post'].italicAngle == 0:
failed = True
yield FAIL, Message("zero-italic",
("Font is italic, so post.italicAngle"
" should be non-zero."))
else:
if ttFont["post"].italicAngle != 0:
failed = True
yield FAIL, Message("non-zero-normal",
("Font is not italic, so post.italicAngle"
" should be equal to zero."))
if not failed:
yield PASS, ("Value of post.italicAngle is {}"
" with style='{}'.").format(value, style) | def function[com_google_fonts_check_italic_angle, parameter[ttFont, style]]:
constant[Checking post.italicAngle value.]
variable[failed] assign[=] constant[False]
variable[value] assign[=] call[name[ttFont]][constant[post]].italicAngle
if compare[name[value] greater[>] constant[0]] begin[:]
variable[failed] assign[=] constant[True]
<ast.Yield object at 0x7da1b12c17e0>
if compare[call[name[abs], parameter[name[value]]] greater[>] constant[30]] begin[:]
variable[failed] assign[=] constant[True]
<ast.Yield object at 0x7da1b12c2920>
if compare[constant[Italic] in name[style]] begin[:]
if compare[call[name[ttFont]][constant[post]].italicAngle equal[==] constant[0]] begin[:]
variable[failed] assign[=] constant[True]
<ast.Yield object at 0x7da1b12b5510>
if <ast.UnaryOp object at 0x7da1b1212350> begin[:]
<ast.Yield object at 0x7da1b1213d00> | keyword[def] identifier[com_google_fonts_check_italic_angle] ( identifier[ttFont] , identifier[style] ):
literal[string]
identifier[failed] = keyword[False]
identifier[value] = identifier[ttFont] [ literal[string] ]. identifier[italicAngle]
keyword[if] identifier[value] > literal[int] :
identifier[failed] = keyword[True]
keyword[yield] identifier[FAIL] , identifier[Message] ( literal[string] ,
( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[value] ,- identifier[value] ))
keyword[if] identifier[abs] ( identifier[value] )> literal[int] :
identifier[failed] = keyword[True]
keyword[yield] identifier[FAIL] , identifier[Message] ( literal[string] ,
( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[value] ))
keyword[elif] identifier[abs] ( identifier[value] )> literal[int] :
identifier[failed] = keyword[True]
keyword[yield] identifier[WARN] , identifier[Message] ( literal[string] ,
( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[value] ))
keyword[if] literal[string] keyword[in] identifier[style] :
keyword[if] identifier[ttFont] [ literal[string] ]. identifier[italicAngle] == literal[int] :
identifier[failed] = keyword[True]
keyword[yield] identifier[FAIL] , identifier[Message] ( literal[string] ,
( literal[string]
literal[string] ))
keyword[else] :
keyword[if] identifier[ttFont] [ literal[string] ]. identifier[italicAngle] != literal[int] :
identifier[failed] = keyword[True]
keyword[yield] identifier[FAIL] , identifier[Message] ( literal[string] ,
( literal[string]
literal[string] ))
keyword[if] keyword[not] identifier[failed] :
keyword[yield] identifier[PASS] ,( literal[string]
literal[string] ). identifier[format] ( identifier[value] , identifier[style] ) | def com_google_fonts_check_italic_angle(ttFont, style):
"""Checking post.italicAngle value."""
failed = False
value = ttFont['post'].italicAngle
# Checking that italicAngle <= 0
if value > 0:
failed = True
yield (FAIL, Message('positive', 'The value of post.italicAngle is positive, which is likely a mistake and should become negative, from {} to {}.'.format(value, -value))) # depends on [control=['if'], data=['value']]
# Checking that italicAngle is less than 20° (not good) or 30° (bad)
# Also note we invert the value to check it in a clear way
if abs(value) > 30:
failed = True
yield (FAIL, Message('over -30 degrees', 'The value of post.italicAngle ({}) is very high (over -30°!) and should be confirmed.'.format(value))) # depends on [control=['if'], data=[]]
elif abs(value) > 20:
failed = True
yield (WARN, Message('over -20 degrees', 'The value of post.italicAngle ({}) seems very high (over -20°!) and should be confirmed.'.format(value))) # depends on [control=['if'], data=[]]
# Checking if italicAngle matches font style:
if 'Italic' in style:
if ttFont['post'].italicAngle == 0:
failed = True
yield (FAIL, Message('zero-italic', 'Font is italic, so post.italicAngle should be non-zero.')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif ttFont['post'].italicAngle != 0:
failed = True
yield (FAIL, Message('non-zero-normal', 'Font is not italic, so post.italicAngle should be equal to zero.')) # depends on [control=['if'], data=[]]
if not failed:
yield (PASS, "Value of post.italicAngle is {} with style='{}'.".format(value, style)) # depends on [control=['if'], data=[]] |
def remove(self, *l):
''' remove elements from self.value by matching.
Create the exactly same single you want to delete and pass it(them) in.
Normally this method needs to be overwrited by subclass. It only looks inside current instance's value, not recursive. There is no need for a recursive one anyway.
Args:
*l: a single element, a bunch of element seperated by comma, or a list of elements, or any combination. Element is what you match with.
'''
removeList = list(flatten(l))
self._remove(removeList, self.value) | def function[remove, parameter[self]]:
constant[ remove elements from self.value by matching.
Create the exactly same single you want to delete and pass it(them) in.
Normally this method needs to be overwrited by subclass. It only looks inside current instance's value, not recursive. There is no need for a recursive one anyway.
Args:
*l: a single element, a bunch of element seperated by comma, or a list of elements, or any combination. Element is what you match with.
]
variable[removeList] assign[=] call[name[list], parameter[call[name[flatten], parameter[name[l]]]]]
call[name[self]._remove, parameter[name[removeList], name[self].value]] | keyword[def] identifier[remove] ( identifier[self] ,* identifier[l] ):
literal[string]
identifier[removeList] = identifier[list] ( identifier[flatten] ( identifier[l] ))
identifier[self] . identifier[_remove] ( identifier[removeList] , identifier[self] . identifier[value] ) | def remove(self, *l):
""" remove elements from self.value by matching.
Create the exactly same single you want to delete and pass it(them) in.
Normally this method needs to be overwrited by subclass. It only looks inside current instance's value, not recursive. There is no need for a recursive one anyway.
Args:
*l: a single element, a bunch of element seperated by comma, or a list of elements, or any combination. Element is what you match with.
"""
removeList = list(flatten(l))
self._remove(removeList, self.value) |
def load_data(self, data_np):
"""
Load raw numpy data into the viewer.
"""
image = AstroImage.AstroImage(logger=self.logger)
image.set_data(data_np)
self.set_image(image) | def function[load_data, parameter[self, data_np]]:
constant[
Load raw numpy data into the viewer.
]
variable[image] assign[=] call[name[AstroImage].AstroImage, parameter[]]
call[name[image].set_data, parameter[name[data_np]]]
call[name[self].set_image, parameter[name[image]]] | keyword[def] identifier[load_data] ( identifier[self] , identifier[data_np] ):
literal[string]
identifier[image] = identifier[AstroImage] . identifier[AstroImage] ( identifier[logger] = identifier[self] . identifier[logger] )
identifier[image] . identifier[set_data] ( identifier[data_np] )
identifier[self] . identifier[set_image] ( identifier[image] ) | def load_data(self, data_np):
"""
Load raw numpy data into the viewer.
"""
image = AstroImage.AstroImage(logger=self.logger)
image.set_data(data_np)
self.set_image(image) |
def format_context(
context: Context, formatter: typing.Union[str, Formatter] = "full"
) -> str:
"""Output the a context dictionary as a string."""
if not context:
return ""
if callable(formatter):
formatter_func = formatter
else:
if formatter in CONTEXT_FORMATTERS:
formatter_func = CONTEXT_FORMATTERS[formatter]
else:
raise ValueError(f'Invalid context format: "{formatter}"')
return formatter_func(context) | def function[format_context, parameter[context, formatter]]:
constant[Output the a context dictionary as a string.]
if <ast.UnaryOp object at 0x7da204566dd0> begin[:]
return[constant[]]
if call[name[callable], parameter[name[formatter]]] begin[:]
variable[formatter_func] assign[=] name[formatter]
return[call[name[formatter_func], parameter[name[context]]]] | keyword[def] identifier[format_context] (
identifier[context] : identifier[Context] , identifier[formatter] : identifier[typing] . identifier[Union] [ identifier[str] , identifier[Formatter] ]= literal[string]
)-> identifier[str] :
literal[string]
keyword[if] keyword[not] identifier[context] :
keyword[return] literal[string]
keyword[if] identifier[callable] ( identifier[formatter] ):
identifier[formatter_func] = identifier[formatter]
keyword[else] :
keyword[if] identifier[formatter] keyword[in] identifier[CONTEXT_FORMATTERS] :
identifier[formatter_func] = identifier[CONTEXT_FORMATTERS] [ identifier[formatter] ]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[formatter_func] ( identifier[context] ) | def format_context(context: Context, formatter: typing.Union[str, Formatter]='full') -> str:
"""Output the a context dictionary as a string."""
if not context:
return '' # depends on [control=['if'], data=[]]
if callable(formatter):
formatter_func = formatter # depends on [control=['if'], data=[]]
elif formatter in CONTEXT_FORMATTERS:
formatter_func = CONTEXT_FORMATTERS[formatter] # depends on [control=['if'], data=['formatter', 'CONTEXT_FORMATTERS']]
else:
raise ValueError(f'Invalid context format: "{formatter}"')
return formatter_func(context) |
def sql_post(self, owner, id, query, **kwargs):
"""
SQL query
This endpoint executes SQL queries against a dataset. SQL results are available in a variety of formats. By default, `application/json` will be returned. Set the `Accept` header to one of the following values in accordance with your preference: * `text/csv` * `application/json` * `application/json-l` * `application/x-ndjson` New to SQL? Check out data.world's [SQL manual](https://docs.data.world/tutorials/dwsql/) .
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.sql_post(owner, id, query, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: User name and unique identifier of the creator of a dataset or project. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required)
:param str id: Dataset unique identifier. For example, in the URL:[https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), an-intro-to-dataworld-dataset is the unique identifier of the dataset. (required)
:param str query: (required)
:param bool include_table_schema: Flags indicating to include table schema in the response.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.sql_post_with_http_info(owner, id, query, **kwargs)
else:
(data) = self.sql_post_with_http_info(owner, id, query, **kwargs)
return data | def function[sql_post, parameter[self, owner, id, query]]:
constant[
SQL query
This endpoint executes SQL queries against a dataset. SQL results are available in a variety of formats. By default, `application/json` will be returned. Set the `Accept` header to one of the following values in accordance with your preference: * `text/csv` * `application/json` * `application/json-l` * `application/x-ndjson` New to SQL? Check out data.world's [SQL manual](https://docs.data.world/tutorials/dwsql/) .
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.sql_post(owner, id, query, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: User name and unique identifier of the creator of a dataset or project. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required)
:param str id: Dataset unique identifier. For example, in the URL:[https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), an-intro-to-dataworld-dataset is the unique identifier of the dataset. (required)
:param str query: (required)
:param bool include_table_schema: Flags indicating to include table schema in the response.
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[callback]]] begin[:]
return[call[name[self].sql_post_with_http_info, parameter[name[owner], name[id], name[query]]]] | keyword[def] identifier[sql_post] ( identifier[self] , identifier[owner] , identifier[id] , identifier[query] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[sql_post_with_http_info] ( identifier[owner] , identifier[id] , identifier[query] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[sql_post_with_http_info] ( identifier[owner] , identifier[id] , identifier[query] ,** identifier[kwargs] )
keyword[return] identifier[data] | def sql_post(self, owner, id, query, **kwargs):
"""
SQL query
This endpoint executes SQL queries against a dataset. SQL results are available in a variety of formats. By default, `application/json` will be returned. Set the `Accept` header to one of the following values in accordance with your preference: * `text/csv` * `application/json` * `application/json-l` * `application/x-ndjson` New to SQL? Check out data.world's [SQL manual](https://docs.data.world/tutorials/dwsql/) .
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.sql_post(owner, id, query, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: User name and unique identifier of the creator of a dataset or project. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required)
:param str id: Dataset unique identifier. For example, in the URL:[https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), an-intro-to-dataworld-dataset is the unique identifier of the dataset. (required)
:param str query: (required)
:param bool include_table_schema: Flags indicating to include table schema in the response.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.sql_post_with_http_info(owner, id, query, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.sql_post_with_http_info(owner, id, query, **kwargs)
return data |
def simple_profile(self, sex=None):
"""
Generates a basic profile with personal informations
"""
SEX = ["F", "M"]
if sex not in SEX:
sex = self.random_element(SEX)
if sex == 'F':
name = self.generator.name_female()
elif sex == 'M':
name = self.generator.name_male()
return {
"username": self.generator.user_name(),
"name": name,
"sex": sex,
"address": self.generator.address(),
"mail": self.generator.free_email(),
"birthdate": self.generator.date_of_birth(),
} | def function[simple_profile, parameter[self, sex]]:
constant[
Generates a basic profile with personal informations
]
variable[SEX] assign[=] list[[<ast.Constant object at 0x7da18dc9bd60>, <ast.Constant object at 0x7da18dc990f0>]]
if compare[name[sex] <ast.NotIn object at 0x7da2590d7190> name[SEX]] begin[:]
variable[sex] assign[=] call[name[self].random_element, parameter[name[SEX]]]
if compare[name[sex] equal[==] constant[F]] begin[:]
variable[name] assign[=] call[name[self].generator.name_female, parameter[]]
return[dictionary[[<ast.Constant object at 0x7da18dc9b4c0>, <ast.Constant object at 0x7da18dc9b1c0>, <ast.Constant object at 0x7da18dc9ba00>, <ast.Constant object at 0x7da18dc9b670>, <ast.Constant object at 0x7da18dc98cd0>, <ast.Constant object at 0x7da18dc9bbb0>], [<ast.Call object at 0x7da18dc9b490>, <ast.Name object at 0x7da18dc9af80>, <ast.Name object at 0x7da18dc98610>, <ast.Call object at 0x7da18dc981c0>, <ast.Call object at 0x7da18dc9a7a0>, <ast.Call object at 0x7da18dc9b910>]]] | keyword[def] identifier[simple_profile] ( identifier[self] , identifier[sex] = keyword[None] ):
literal[string]
identifier[SEX] =[ literal[string] , literal[string] ]
keyword[if] identifier[sex] keyword[not] keyword[in] identifier[SEX] :
identifier[sex] = identifier[self] . identifier[random_element] ( identifier[SEX] )
keyword[if] identifier[sex] == literal[string] :
identifier[name] = identifier[self] . identifier[generator] . identifier[name_female] ()
keyword[elif] identifier[sex] == literal[string] :
identifier[name] = identifier[self] . identifier[generator] . identifier[name_male] ()
keyword[return] {
literal[string] : identifier[self] . identifier[generator] . identifier[user_name] (),
literal[string] : identifier[name] ,
literal[string] : identifier[sex] ,
literal[string] : identifier[self] . identifier[generator] . identifier[address] (),
literal[string] : identifier[self] . identifier[generator] . identifier[free_email] (),
literal[string] : identifier[self] . identifier[generator] . identifier[date_of_birth] (),
} | def simple_profile(self, sex=None):
"""
Generates a basic profile with personal informations
"""
SEX = ['F', 'M']
if sex not in SEX:
sex = self.random_element(SEX) # depends on [control=['if'], data=['sex', 'SEX']]
if sex == 'F':
name = self.generator.name_female() # depends on [control=['if'], data=[]]
elif sex == 'M':
name = self.generator.name_male() # depends on [control=['if'], data=[]]
return {'username': self.generator.user_name(), 'name': name, 'sex': sex, 'address': self.generator.address(), 'mail': self.generator.free_email(), 'birthdate': self.generator.date_of_birth()} |
def add_state_execution_output_to_scoped_data(self, dictionary, state):
"""Add a state execution output to the scoped data
:param dictionary: The dictionary that is added to the scoped data
:param state: The state that finished execution and provide the dictionary
"""
for output_name, value in dictionary.items():
for output_data_port_key, data_port in list(state.output_data_ports.items()):
if output_name == data_port.name:
if not isinstance(value, data_port.data_type):
if (not ((type(value) is float or type(value) is int) and
(data_port.data_type is float or data_port.data_type is int)) and
not (isinstance(value, type(None)))):
logger.error("The data type of output port {0} should be of type {1}, but is of type {2}".
format(output_name, data_port.data_type, type(value)))
self.scoped_data[str(output_data_port_key) + state.state_id] = \
ScopedData(data_port.name, value, type(value), state.state_id, OutputDataPort, parent=self) | def function[add_state_execution_output_to_scoped_data, parameter[self, dictionary, state]]:
constant[Add a state execution output to the scoped data
:param dictionary: The dictionary that is added to the scoped data
:param state: The state that finished execution and provide the dictionary
]
for taget[tuple[[<ast.Name object at 0x7da1b1a28e20>, <ast.Name object at 0x7da1b1a2b3d0>]]] in starred[call[name[dictionary].items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1a2b160>, <ast.Name object at 0x7da1b1a2bee0>]]] in starred[call[name[list], parameter[call[name[state].output_data_ports.items, parameter[]]]]] begin[:]
if compare[name[output_name] equal[==] name[data_port].name] begin[:]
if <ast.UnaryOp object at 0x7da1b1a2a650> begin[:]
if <ast.BoolOp object at 0x7da1b1ab9630> begin[:]
call[name[logger].error, parameter[call[constant[The data type of output port {0} should be of type {1}, but is of type {2}].format, parameter[name[output_name], name[data_port].data_type, call[name[type], parameter[name[value]]]]]]]
call[name[self].scoped_data][binary_operation[call[name[str], parameter[name[output_data_port_key]]] + name[state].state_id]] assign[=] call[name[ScopedData], parameter[name[data_port].name, name[value], call[name[type], parameter[name[value]]], name[state].state_id, name[OutputDataPort]]] | keyword[def] identifier[add_state_execution_output_to_scoped_data] ( identifier[self] , identifier[dictionary] , identifier[state] ):
literal[string]
keyword[for] identifier[output_name] , identifier[value] keyword[in] identifier[dictionary] . identifier[items] ():
keyword[for] identifier[output_data_port_key] , identifier[data_port] keyword[in] identifier[list] ( identifier[state] . identifier[output_data_ports] . identifier[items] ()):
keyword[if] identifier[output_name] == identifier[data_port] . identifier[name] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[data_port] . identifier[data_type] ):
keyword[if] ( keyword[not] (( identifier[type] ( identifier[value] ) keyword[is] identifier[float] keyword[or] identifier[type] ( identifier[value] ) keyword[is] identifier[int] ) keyword[and]
( identifier[data_port] . identifier[data_type] keyword[is] identifier[float] keyword[or] identifier[data_port] . identifier[data_type] keyword[is] identifier[int] )) keyword[and]
keyword[not] ( identifier[isinstance] ( identifier[value] , identifier[type] ( keyword[None] )))):
identifier[logger] . identifier[error] ( literal[string] .
identifier[format] ( identifier[output_name] , identifier[data_port] . identifier[data_type] , identifier[type] ( identifier[value] )))
identifier[self] . identifier[scoped_data] [ identifier[str] ( identifier[output_data_port_key] )+ identifier[state] . identifier[state_id] ]= identifier[ScopedData] ( identifier[data_port] . identifier[name] , identifier[value] , identifier[type] ( identifier[value] ), identifier[state] . identifier[state_id] , identifier[OutputDataPort] , identifier[parent] = identifier[self] ) | def add_state_execution_output_to_scoped_data(self, dictionary, state):
"""Add a state execution output to the scoped data
:param dictionary: The dictionary that is added to the scoped data
:param state: The state that finished execution and provide the dictionary
"""
for (output_name, value) in dictionary.items():
for (output_data_port_key, data_port) in list(state.output_data_ports.items()):
if output_name == data_port.name:
if not isinstance(value, data_port.data_type):
if not ((type(value) is float or type(value) is int) and (data_port.data_type is float or data_port.data_type is int)) and (not isinstance(value, type(None))):
logger.error('The data type of output port {0} should be of type {1}, but is of type {2}'.format(output_name, data_port.data_type, type(value))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self.scoped_data[str(output_data_port_key) + state.state_id] = ScopedData(data_port.name, value, type(value), state.state_id, OutputDataPort, parent=self) # depends on [control=['if'], data=['output_name']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def var(
self, axis=None, skipna=None, level=None, ddof=1, numeric_only=None, **kwargs
):
"""Computes variance across the DataFrame.
Args:
axis (int): The axis to take the variance on.
skipna (bool): True to skip NA values, false otherwise.
ddof (int): degrees of freedom
Returns:
The variance of the DataFrame.
"""
axis = self._get_axis_number(axis) if axis is not None else 0
if numeric_only is not None and not numeric_only:
self._validate_dtypes(numeric_only=True)
return self._reduce_dimension(
self._query_compiler.var(
axis=axis,
skipna=skipna,
level=level,
ddof=ddof,
numeric_only=numeric_only,
**kwargs
)
) | def function[var, parameter[self, axis, skipna, level, ddof, numeric_only]]:
constant[Computes variance across the DataFrame.
Args:
axis (int): The axis to take the variance on.
skipna (bool): True to skip NA values, false otherwise.
ddof (int): degrees of freedom
Returns:
The variance of the DataFrame.
]
variable[axis] assign[=] <ast.IfExp object at 0x7da18ede5cc0>
if <ast.BoolOp object at 0x7da18ede62c0> begin[:]
call[name[self]._validate_dtypes, parameter[]]
return[call[name[self]._reduce_dimension, parameter[call[name[self]._query_compiler.var, parameter[]]]]] | keyword[def] identifier[var] (
identifier[self] , identifier[axis] = keyword[None] , identifier[skipna] = keyword[None] , identifier[level] = keyword[None] , identifier[ddof] = literal[int] , identifier[numeric_only] = keyword[None] ,** identifier[kwargs]
):
literal[string]
identifier[axis] = identifier[self] . identifier[_get_axis_number] ( identifier[axis] ) keyword[if] identifier[axis] keyword[is] keyword[not] keyword[None] keyword[else] literal[int]
keyword[if] identifier[numeric_only] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[numeric_only] :
identifier[self] . identifier[_validate_dtypes] ( identifier[numeric_only] = keyword[True] )
keyword[return] identifier[self] . identifier[_reduce_dimension] (
identifier[self] . identifier[_query_compiler] . identifier[var] (
identifier[axis] = identifier[axis] ,
identifier[skipna] = identifier[skipna] ,
identifier[level] = identifier[level] ,
identifier[ddof] = identifier[ddof] ,
identifier[numeric_only] = identifier[numeric_only] ,
** identifier[kwargs]
)
) | def var(self, axis=None, skipna=None, level=None, ddof=1, numeric_only=None, **kwargs):
"""Computes variance across the DataFrame.
Args:
axis (int): The axis to take the variance on.
skipna (bool): True to skip NA values, false otherwise.
ddof (int): degrees of freedom
Returns:
The variance of the DataFrame.
"""
axis = self._get_axis_number(axis) if axis is not None else 0
if numeric_only is not None and (not numeric_only):
self._validate_dtypes(numeric_only=True) # depends on [control=['if'], data=[]]
return self._reduce_dimension(self._query_compiler.var(axis=axis, skipna=skipna, level=level, ddof=ddof, numeric_only=numeric_only, **kwargs)) |
def conjugate(self):
"""Complex conjugate of of the product"""
return self.__class__.create(
*[arg.conjugate() for arg in reversed(self.args)]) | def function[conjugate, parameter[self]]:
constant[Complex conjugate of of the product]
return[call[name[self].__class__.create, parameter[<ast.Starred object at 0x7da18c4cd300>]]] | keyword[def] identifier[conjugate] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] . identifier[create] (
*[ identifier[arg] . identifier[conjugate] () keyword[for] identifier[arg] keyword[in] identifier[reversed] ( identifier[self] . identifier[args] )]) | def conjugate(self):
"""Complex conjugate of of the product"""
return self.__class__.create(*[arg.conjugate() for arg in reversed(self.args)]) |
def watch_printer(watch, value):
"""Print a watched value.
Args:
watch (DataStream): The stream that was watched
value (IOTileReading): The value to was seen
"""
print("({: 8} s) {}: {}".format(value.raw_time, watch, value.value)) | def function[watch_printer, parameter[watch, value]]:
constant[Print a watched value.
Args:
watch (DataStream): The stream that was watched
value (IOTileReading): The value to was seen
]
call[name[print], parameter[call[constant[({: 8} s) {}: {}].format, parameter[name[value].raw_time, name[watch], name[value].value]]]] | keyword[def] identifier[watch_printer] ( identifier[watch] , identifier[value] ):
literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[value] . identifier[raw_time] , identifier[watch] , identifier[value] . identifier[value] )) | def watch_printer(watch, value):
"""Print a watched value.
Args:
watch (DataStream): The stream that was watched
value (IOTileReading): The value to was seen
"""
print('({: 8} s) {}: {}'.format(value.raw_time, watch, value.value)) |
def featured_games(self, region):
"""
Get list of featured games.
:param string region: The region to execute this request on
:returns: FeaturedGames
"""
url, query = SpectatorApiV4Urls.featured_games(region=region)
return self._raw_request(self.featured_games.__name__, region, url, query) | def function[featured_games, parameter[self, region]]:
constant[
Get list of featured games.
:param string region: The region to execute this request on
:returns: FeaturedGames
]
<ast.Tuple object at 0x7da1b24b1510> assign[=] call[name[SpectatorApiV4Urls].featured_games, parameter[]]
return[call[name[self]._raw_request, parameter[name[self].featured_games.__name__, name[region], name[url], name[query]]]] | keyword[def] identifier[featured_games] ( identifier[self] , identifier[region] ):
literal[string]
identifier[url] , identifier[query] = identifier[SpectatorApiV4Urls] . identifier[featured_games] ( identifier[region] = identifier[region] )
keyword[return] identifier[self] . identifier[_raw_request] ( identifier[self] . identifier[featured_games] . identifier[__name__] , identifier[region] , identifier[url] , identifier[query] ) | def featured_games(self, region):
"""
Get list of featured games.
:param string region: The region to execute this request on
:returns: FeaturedGames
"""
(url, query) = SpectatorApiV4Urls.featured_games(region=region)
return self._raw_request(self.featured_games.__name__, region, url, query) |
def _adapt_response(self, response):
"""Convert various error responses to standardized ErrorDetails."""
errors, meta = super(ServerError, self)._adapt_response(response)
return errors[0], meta | def function[_adapt_response, parameter[self, response]]:
constant[Convert various error responses to standardized ErrorDetails.]
<ast.Tuple object at 0x7da1b11522f0> assign[=] call[call[name[super], parameter[name[ServerError], name[self]]]._adapt_response, parameter[name[response]]]
return[tuple[[<ast.Subscript object at 0x7da1b1179120>, <ast.Name object at 0x7da1b117a380>]]] | keyword[def] identifier[_adapt_response] ( identifier[self] , identifier[response] ):
literal[string]
identifier[errors] , identifier[meta] = identifier[super] ( identifier[ServerError] , identifier[self] ). identifier[_adapt_response] ( identifier[response] )
keyword[return] identifier[errors] [ literal[int] ], identifier[meta] | def _adapt_response(self, response):
"""Convert various error responses to standardized ErrorDetails."""
(errors, meta) = super(ServerError, self)._adapt_response(response)
return (errors[0], meta) |
def get_process_path(tshark_path=None, process_name="tshark"):
"""
Finds the path of the tshark executable. If the user has provided a path
or specified a location in config.ini it will be used. Otherwise default
locations will be searched.
:param tshark_path: Path of the tshark binary
:raises TSharkNotFoundException in case TShark is not found in any location.
"""
config = get_config()
possible_paths = [config.get(process_name, "%s_path" % process_name)]
# Add the user provided path to the search list
if tshark_path is not None:
possible_paths.insert(0, tshark_path)
# Windows search order: configuration file's path, common paths.
if sys.platform.startswith('win'):
for env in ('ProgramFiles(x86)', 'ProgramFiles'):
program_files = os.getenv(env)
if program_files is not None:
possible_paths.append(
os.path.join(program_files, 'Wireshark', '%s.exe' % process_name)
)
# Linux, etc. search order: configuration file's path, the system's path
else:
os_path = os.getenv(
'PATH',
'/usr/bin:/usr/sbin:/usr/lib/tshark:/usr/local/bin'
)
for path in os_path.split(':'):
possible_paths.append(os.path.join(path, process_name))
for path in possible_paths:
if os.path.exists(path):
if sys.platform.startswith('win'):
path = path.replace("\\", "/")
return path
raise TSharkNotFoundException(
'TShark not found. Try adding its location to the configuration file. '
'Searched these paths: {}'.format(possible_paths)
) | def function[get_process_path, parameter[tshark_path, process_name]]:
constant[
Finds the path of the tshark executable. If the user has provided a path
or specified a location in config.ini it will be used. Otherwise default
locations will be searched.
:param tshark_path: Path of the tshark binary
:raises TSharkNotFoundException in case TShark is not found in any location.
]
variable[config] assign[=] call[name[get_config], parameter[]]
variable[possible_paths] assign[=] list[[<ast.Call object at 0x7da1b1b0e1a0>]]
if compare[name[tshark_path] is_not constant[None]] begin[:]
call[name[possible_paths].insert, parameter[constant[0], name[tshark_path]]]
if call[name[sys].platform.startswith, parameter[constant[win]]] begin[:]
for taget[name[env]] in starred[tuple[[<ast.Constant object at 0x7da1b1b0de40>, <ast.Constant object at 0x7da1b1b0e320>]]] begin[:]
variable[program_files] assign[=] call[name[os].getenv, parameter[name[env]]]
if compare[name[program_files] is_not constant[None]] begin[:]
call[name[possible_paths].append, parameter[call[name[os].path.join, parameter[name[program_files], constant[Wireshark], binary_operation[constant[%s.exe] <ast.Mod object at 0x7da2590d6920> name[process_name]]]]]]
for taget[name[path]] in starred[name[possible_paths]] begin[:]
if call[name[os].path.exists, parameter[name[path]]] begin[:]
if call[name[sys].platform.startswith, parameter[constant[win]]] begin[:]
variable[path] assign[=] call[name[path].replace, parameter[constant[\], constant[/]]]
return[name[path]]
<ast.Raise object at 0x7da1b1b0eb30> | keyword[def] identifier[get_process_path] ( identifier[tshark_path] = keyword[None] , identifier[process_name] = literal[string] ):
literal[string]
identifier[config] = identifier[get_config] ()
identifier[possible_paths] =[ identifier[config] . identifier[get] ( identifier[process_name] , literal[string] % identifier[process_name] )]
keyword[if] identifier[tshark_path] keyword[is] keyword[not] keyword[None] :
identifier[possible_paths] . identifier[insert] ( literal[int] , identifier[tshark_path] )
keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ):
keyword[for] identifier[env] keyword[in] ( literal[string] , literal[string] ):
identifier[program_files] = identifier[os] . identifier[getenv] ( identifier[env] )
keyword[if] identifier[program_files] keyword[is] keyword[not] keyword[None] :
identifier[possible_paths] . identifier[append] (
identifier[os] . identifier[path] . identifier[join] ( identifier[program_files] , literal[string] , literal[string] % identifier[process_name] )
)
keyword[else] :
identifier[os_path] = identifier[os] . identifier[getenv] (
literal[string] ,
literal[string]
)
keyword[for] identifier[path] keyword[in] identifier[os_path] . identifier[split] ( literal[string] ):
identifier[possible_paths] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[process_name] ))
keyword[for] identifier[path] keyword[in] identifier[possible_paths] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ):
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[path]
keyword[raise] identifier[TSharkNotFoundException] (
literal[string]
literal[string] . identifier[format] ( identifier[possible_paths] )
) | def get_process_path(tshark_path=None, process_name='tshark'):
"""
Finds the path of the tshark executable. If the user has provided a path
or specified a location in config.ini it will be used. Otherwise default
locations will be searched.
:param tshark_path: Path of the tshark binary
:raises TSharkNotFoundException in case TShark is not found in any location.
"""
config = get_config()
possible_paths = [config.get(process_name, '%s_path' % process_name)]
# Add the user provided path to the search list
if tshark_path is not None:
possible_paths.insert(0, tshark_path) # depends on [control=['if'], data=['tshark_path']]
# Windows search order: configuration file's path, common paths.
if sys.platform.startswith('win'):
for env in ('ProgramFiles(x86)', 'ProgramFiles'):
program_files = os.getenv(env)
if program_files is not None:
possible_paths.append(os.path.join(program_files, 'Wireshark', '%s.exe' % process_name)) # depends on [control=['if'], data=['program_files']] # depends on [control=['for'], data=['env']] # depends on [control=['if'], data=[]]
else:
# Linux, etc. search order: configuration file's path, the system's path
os_path = os.getenv('PATH', '/usr/bin:/usr/sbin:/usr/lib/tshark:/usr/local/bin')
for path in os_path.split(':'):
possible_paths.append(os.path.join(path, process_name)) # depends on [control=['for'], data=['path']]
for path in possible_paths:
if os.path.exists(path):
if sys.platform.startswith('win'):
path = path.replace('\\', '/') # depends on [control=['if'], data=[]]
return path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']]
raise TSharkNotFoundException('TShark not found. Try adding its location to the configuration file. Searched these paths: {}'.format(possible_paths)) |
def create_endpoint_config(self, name, model_name, initial_instance_count, instance_type,
accelerator_type=None, tags=None, kms_key=None):
"""Create an Amazon SageMaker endpoint configuration.
The endpoint configuration identifies the Amazon SageMaker model (created using the
``CreateModel`` API) and the hardware configuration on which to deploy the model. Provide this
endpoint configuration to the ``CreateEndpoint`` API, which then launches the hardware and deploys the model.
Args:
name (str): Name of the Amazon SageMaker endpoint configuration to create.
model_name (str): Name of the Amazon SageMaker ``Model``.
initial_instance_count (int): Minimum number of EC2 instances to launch. The actual number of
active instances for an endpoint at any given time varies due to autoscaling.
instance_type (str): Type of EC2 instance to launch, for example, 'ml.c4.xlarge'.
accelerator_type (str): Type of Elastic Inference accelerator to attach to the instance. For example,
'ml.eia1.medium'. For more information: https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html
tags(List[dict[str, str]]): Optional. The list of tags to add to the endpoint config. Example:
>>> tags = [{'Key': 'tagname', 'Value': 'tagvalue'}]
For more information about tags, see https://boto3.amazonaws.com/v1/documentation\
/api/latest/reference/services/sagemaker.html#SageMaker.Client.add_tags
Returns:
str: Name of the endpoint point configuration created.
"""
LOGGER.info('Creating endpoint-config with name {}'.format(name))
tags = tags or []
request = {
'EndpointConfigName': name,
'ProductionVariants': [
production_variant(model_name, instance_type, initial_instance_count,
accelerator_type=accelerator_type)
],
}
if tags is not None:
request['Tags'] = tags
if kms_key is not None:
request['KmsKeyId'] = kms_key
self.sagemaker_client.create_endpoint_config(**request)
return name | def function[create_endpoint_config, parameter[self, name, model_name, initial_instance_count, instance_type, accelerator_type, tags, kms_key]]:
constant[Create an Amazon SageMaker endpoint configuration.
The endpoint configuration identifies the Amazon SageMaker model (created using the
``CreateModel`` API) and the hardware configuration on which to deploy the model. Provide this
endpoint configuration to the ``CreateEndpoint`` API, which then launches the hardware and deploys the model.
Args:
name (str): Name of the Amazon SageMaker endpoint configuration to create.
model_name (str): Name of the Amazon SageMaker ``Model``.
initial_instance_count (int): Minimum number of EC2 instances to launch. The actual number of
active instances for an endpoint at any given time varies due to autoscaling.
instance_type (str): Type of EC2 instance to launch, for example, 'ml.c4.xlarge'.
accelerator_type (str): Type of Elastic Inference accelerator to attach to the instance. For example,
'ml.eia1.medium'. For more information: https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html
tags(List[dict[str, str]]): Optional. The list of tags to add to the endpoint config. Example:
>>> tags = [{'Key': 'tagname', 'Value': 'tagvalue'}]
For more information about tags, see https://boto3.amazonaws.com/v1/documentation /api/latest/reference/services/sagemaker.html#SageMaker.Client.add_tags
Returns:
str: Name of the endpoint point configuration created.
]
call[name[LOGGER].info, parameter[call[constant[Creating endpoint-config with name {}].format, parameter[name[name]]]]]
variable[tags] assign[=] <ast.BoolOp object at 0x7da1b1c4ab00>
variable[request] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c4b4c0>, <ast.Constant object at 0x7da1b1c497b0>], [<ast.Name object at 0x7da1b1c4bb80>, <ast.List object at 0x7da1b1c4a650>]]
if compare[name[tags] is_not constant[None]] begin[:]
call[name[request]][constant[Tags]] assign[=] name[tags]
if compare[name[kms_key] is_not constant[None]] begin[:]
call[name[request]][constant[KmsKeyId]] assign[=] name[kms_key]
call[name[self].sagemaker_client.create_endpoint_config, parameter[]]
return[name[name]] | keyword[def] identifier[create_endpoint_config] ( identifier[self] , identifier[name] , identifier[model_name] , identifier[initial_instance_count] , identifier[instance_type] ,
identifier[accelerator_type] = keyword[None] , identifier[tags] = keyword[None] , identifier[kms_key] = keyword[None] ):
literal[string]
identifier[LOGGER] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[tags] = identifier[tags] keyword[or] []
identifier[request] ={
literal[string] : identifier[name] ,
literal[string] :[
identifier[production_variant] ( identifier[model_name] , identifier[instance_type] , identifier[initial_instance_count] ,
identifier[accelerator_type] = identifier[accelerator_type] )
],
}
keyword[if] identifier[tags] keyword[is] keyword[not] keyword[None] :
identifier[request] [ literal[string] ]= identifier[tags]
keyword[if] identifier[kms_key] keyword[is] keyword[not] keyword[None] :
identifier[request] [ literal[string] ]= identifier[kms_key]
identifier[self] . identifier[sagemaker_client] . identifier[create_endpoint_config] (** identifier[request] )
keyword[return] identifier[name] | def create_endpoint_config(self, name, model_name, initial_instance_count, instance_type, accelerator_type=None, tags=None, kms_key=None):
"""Create an Amazon SageMaker endpoint configuration.
The endpoint configuration identifies the Amazon SageMaker model (created using the
``CreateModel`` API) and the hardware configuration on which to deploy the model. Provide this
endpoint configuration to the ``CreateEndpoint`` API, which then launches the hardware and deploys the model.
Args:
name (str): Name of the Amazon SageMaker endpoint configuration to create.
model_name (str): Name of the Amazon SageMaker ``Model``.
initial_instance_count (int): Minimum number of EC2 instances to launch. The actual number of
active instances for an endpoint at any given time varies due to autoscaling.
instance_type (str): Type of EC2 instance to launch, for example, 'ml.c4.xlarge'.
accelerator_type (str): Type of Elastic Inference accelerator to attach to the instance. For example,
'ml.eia1.medium'. For more information: https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html
tags(List[dict[str, str]]): Optional. The list of tags to add to the endpoint config. Example:
>>> tags = [{'Key': 'tagname', 'Value': 'tagvalue'}]
For more information about tags, see https://boto3.amazonaws.com/v1/documentation /api/latest/reference/services/sagemaker.html#SageMaker.Client.add_tags
Returns:
str: Name of the endpoint point configuration created.
"""
LOGGER.info('Creating endpoint-config with name {}'.format(name))
tags = tags or []
request = {'EndpointConfigName': name, 'ProductionVariants': [production_variant(model_name, instance_type, initial_instance_count, accelerator_type=accelerator_type)]}
if tags is not None:
request['Tags'] = tags # depends on [control=['if'], data=['tags']]
if kms_key is not None:
request['KmsKeyId'] = kms_key # depends on [control=['if'], data=['kms_key']]
self.sagemaker_client.create_endpoint_config(**request)
return name |
def mousePressEvent( self, event ):
"""
Overloads the mouse press event to handle special cases and \
bypass when the scene is in view mode.
:param event <QMousePressEvent>
"""
event.setAccepted(False)
self._hotspotPressed = False
# ignore events when the scene is in view mode
scene = self.scene()
if self.isLocked() or (scene and scene.inViewMode()):
event.ignore()
self._ignoreMouseEvents = True
return
self._ignoreMouseEvents = False
# block the selection signals
if scene:
scene.blockSelectionSignals(True)
# clear the selection
if ( not (self.isSelected() or
event.modifiers() == Qt.ControlModifier) ):
for item in scene.selectedItems():
if ( item != self ):
item.setSelected(False)
# determine if we need to start any connections
hotspot = self.hotspotAt(event.pos())
if hotspot and hotspot.isEnabled():
hotspot.slot()(event)
# check if the event is accepted
if ( event.isAccepted() ):
self._hotspotPressed = True
return
# try to start the connection
event.accept()
self._pressTime = datetime.datetime.now()
super(XNode, self).mousePressEvent(event) | def function[mousePressEvent, parameter[self, event]]:
constant[
Overloads the mouse press event to handle special cases and bypass when the scene is in view mode.
:param event <QMousePressEvent>
]
call[name[event].setAccepted, parameter[constant[False]]]
name[self]._hotspotPressed assign[=] constant[False]
variable[scene] assign[=] call[name[self].scene, parameter[]]
if <ast.BoolOp object at 0x7da204623e20> begin[:]
call[name[event].ignore, parameter[]]
name[self]._ignoreMouseEvents assign[=] constant[True]
return[None]
name[self]._ignoreMouseEvents assign[=] constant[False]
if name[scene] begin[:]
call[name[scene].blockSelectionSignals, parameter[constant[True]]]
if <ast.UnaryOp object at 0x7da18f09d8a0> begin[:]
for taget[name[item]] in starred[call[name[scene].selectedItems, parameter[]]] begin[:]
if compare[name[item] not_equal[!=] name[self]] begin[:]
call[name[item].setSelected, parameter[constant[False]]]
variable[hotspot] assign[=] call[name[self].hotspotAt, parameter[call[name[event].pos, parameter[]]]]
if <ast.BoolOp object at 0x7da18f09f8b0> begin[:]
call[call[name[hotspot].slot, parameter[]], parameter[name[event]]]
if call[name[event].isAccepted, parameter[]] begin[:]
name[self]._hotspotPressed assign[=] constant[True]
return[None]
call[name[event].accept, parameter[]]
name[self]._pressTime assign[=] call[name[datetime].datetime.now, parameter[]]
call[call[name[super], parameter[name[XNode], name[self]]].mousePressEvent, parameter[name[event]]] | keyword[def] identifier[mousePressEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[event] . identifier[setAccepted] ( keyword[False] )
identifier[self] . identifier[_hotspotPressed] = keyword[False]
identifier[scene] = identifier[self] . identifier[scene] ()
keyword[if] identifier[self] . identifier[isLocked] () keyword[or] ( identifier[scene] keyword[and] identifier[scene] . identifier[inViewMode] ()):
identifier[event] . identifier[ignore] ()
identifier[self] . identifier[_ignoreMouseEvents] = keyword[True]
keyword[return]
identifier[self] . identifier[_ignoreMouseEvents] = keyword[False]
keyword[if] identifier[scene] :
identifier[scene] . identifier[blockSelectionSignals] ( keyword[True] )
keyword[if] ( keyword[not] ( identifier[self] . identifier[isSelected] () keyword[or]
identifier[event] . identifier[modifiers] ()== identifier[Qt] . identifier[ControlModifier] )):
keyword[for] identifier[item] keyword[in] identifier[scene] . identifier[selectedItems] ():
keyword[if] ( identifier[item] != identifier[self] ):
identifier[item] . identifier[setSelected] ( keyword[False] )
identifier[hotspot] = identifier[self] . identifier[hotspotAt] ( identifier[event] . identifier[pos] ())
keyword[if] identifier[hotspot] keyword[and] identifier[hotspot] . identifier[isEnabled] ():
identifier[hotspot] . identifier[slot] ()( identifier[event] )
keyword[if] ( identifier[event] . identifier[isAccepted] ()):
identifier[self] . identifier[_hotspotPressed] = keyword[True]
keyword[return]
identifier[event] . identifier[accept] ()
identifier[self] . identifier[_pressTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[super] ( identifier[XNode] , identifier[self] ). identifier[mousePressEvent] ( identifier[event] ) | def mousePressEvent(self, event):
"""
Overloads the mouse press event to handle special cases and bypass when the scene is in view mode.
:param event <QMousePressEvent>
"""
event.setAccepted(False)
self._hotspotPressed = False
# ignore events when the scene is in view mode
scene = self.scene()
if self.isLocked() or (scene and scene.inViewMode()):
event.ignore()
self._ignoreMouseEvents = True
return # depends on [control=['if'], data=[]]
self._ignoreMouseEvents = False
# block the selection signals
if scene:
scene.blockSelectionSignals(True)
# clear the selection
if not (self.isSelected() or event.modifiers() == Qt.ControlModifier):
for item in scene.selectedItems():
if item != self:
item.setSelected(False) # depends on [control=['if'], data=['item']] # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# determine if we need to start any connections
hotspot = self.hotspotAt(event.pos())
if hotspot and hotspot.isEnabled():
hotspot.slot()(event)
# check if the event is accepted
if event.isAccepted():
self._hotspotPressed = True
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# try to start the connection
event.accept()
self._pressTime = datetime.datetime.now()
super(XNode, self).mousePressEvent(event) |
def read(fnames, markov, progress, leave=True):
"""Read data files and update a generator.
Parameters
----------
fnames : `list` of `str`
File paths.
markov : `markovchain.base.MarkovBase`
Generator to update.
progress : `bool`
Show progress bar.
leave : `bool`, optional
Leave progress bars (default: `True`).
"""
pbar = None
channels = markov.imgtype.channels
tr = markov.scanner.traversal
if progress and not isinstance(tr[0], TraversalProgressWrapper):
tr[0] = TraversalProgressWrapper(tr[0], channels)
tr = tr[0]
try:
with infiles(fnames, progress, leave) as fnames:
for fname in fnames:
if progress:
title = truncate(fname, BAR_DESC_SIZE - 1, False)
pbar = tqdm(
total=markov.levels * len(channels),
desc=title, leave=False, unit='lvl',
bar_format=BAR_FORMAT, dynamic_ncols=True
)
tr.pbar_parent = pbar
markov.data(Image.open(fname), False)
if progress:
pbar.close()
finally:
if pbar is not None:
pbar.close() | def function[read, parameter[fnames, markov, progress, leave]]:
constant[Read data files and update a generator.
Parameters
----------
fnames : `list` of `str`
File paths.
markov : `markovchain.base.MarkovBase`
Generator to update.
progress : `bool`
Show progress bar.
leave : `bool`, optional
Leave progress bars (default: `True`).
]
variable[pbar] assign[=] constant[None]
variable[channels] assign[=] name[markov].imgtype.channels
variable[tr] assign[=] name[markov].scanner.traversal
if <ast.BoolOp object at 0x7da1b2547cd0> begin[:]
call[name[tr]][constant[0]] assign[=] call[name[TraversalProgressWrapper], parameter[call[name[tr]][constant[0]], name[channels]]]
variable[tr] assign[=] call[name[tr]][constant[0]]
<ast.Try object at 0x7da1b2546ec0> | keyword[def] identifier[read] ( identifier[fnames] , identifier[markov] , identifier[progress] , identifier[leave] = keyword[True] ):
literal[string]
identifier[pbar] = keyword[None]
identifier[channels] = identifier[markov] . identifier[imgtype] . identifier[channels]
identifier[tr] = identifier[markov] . identifier[scanner] . identifier[traversal]
keyword[if] identifier[progress] keyword[and] keyword[not] identifier[isinstance] ( identifier[tr] [ literal[int] ], identifier[TraversalProgressWrapper] ):
identifier[tr] [ literal[int] ]= identifier[TraversalProgressWrapper] ( identifier[tr] [ literal[int] ], identifier[channels] )
identifier[tr] = identifier[tr] [ literal[int] ]
keyword[try] :
keyword[with] identifier[infiles] ( identifier[fnames] , identifier[progress] , identifier[leave] ) keyword[as] identifier[fnames] :
keyword[for] identifier[fname] keyword[in] identifier[fnames] :
keyword[if] identifier[progress] :
identifier[title] = identifier[truncate] ( identifier[fname] , identifier[BAR_DESC_SIZE] - literal[int] , keyword[False] )
identifier[pbar] = identifier[tqdm] (
identifier[total] = identifier[markov] . identifier[levels] * identifier[len] ( identifier[channels] ),
identifier[desc] = identifier[title] , identifier[leave] = keyword[False] , identifier[unit] = literal[string] ,
identifier[bar_format] = identifier[BAR_FORMAT] , identifier[dynamic_ncols] = keyword[True]
)
identifier[tr] . identifier[pbar_parent] = identifier[pbar]
identifier[markov] . identifier[data] ( identifier[Image] . identifier[open] ( identifier[fname] ), keyword[False] )
keyword[if] identifier[progress] :
identifier[pbar] . identifier[close] ()
keyword[finally] :
keyword[if] identifier[pbar] keyword[is] keyword[not] keyword[None] :
identifier[pbar] . identifier[close] () | def read(fnames, markov, progress, leave=True):
"""Read data files and update a generator.
Parameters
----------
fnames : `list` of `str`
File paths.
markov : `markovchain.base.MarkovBase`
Generator to update.
progress : `bool`
Show progress bar.
leave : `bool`, optional
Leave progress bars (default: `True`).
"""
pbar = None
channels = markov.imgtype.channels
tr = markov.scanner.traversal
if progress and (not isinstance(tr[0], TraversalProgressWrapper)):
tr[0] = TraversalProgressWrapper(tr[0], channels) # depends on [control=['if'], data=[]]
tr = tr[0]
try:
with infiles(fnames, progress, leave) as fnames:
for fname in fnames:
if progress:
title = truncate(fname, BAR_DESC_SIZE - 1, False)
pbar = tqdm(total=markov.levels * len(channels), desc=title, leave=False, unit='lvl', bar_format=BAR_FORMAT, dynamic_ncols=True)
tr.pbar_parent = pbar # depends on [control=['if'], data=[]]
markov.data(Image.open(fname), False)
if progress:
pbar.close() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fname']] # depends on [control=['with'], data=['fnames']] # depends on [control=['try'], data=[]]
finally:
if pbar is not None:
pbar.close() # depends on [control=['if'], data=['pbar']] |
def _qnwlege1(n, a, b):
"""
Compute univariate Guass-Legendre quadrature nodes and weights
Parameters
----------
n : int
The number of nodes
a : int
The lower endpoint
b : int
The upper endpoint
Returns
-------
nodes : np.ndarray(dtype=float)
An n element array of nodes
nodes : np.ndarray(dtype=float)
An n element array of weights
Notes
-----
Based of original function ``qnwlege1`` in CompEcon toolbox by
Miranda and Fackler
References
----------
Miranda, Mario J, and Paul L Fackler. Applied Computational
Economics and Finance, MIT Press, 2002.
"""
# import ipdb; ipdb.set_trace()
maxit = 100
m = int(fix((n + 1) / 2.0))
xm = 0.5 * (b + a)
xl = 0.5 * (b - a)
nodes = np.zeros(n)
weights = nodes.copy()
i = np.arange(m)
z = np.cos(np.pi * ((i + 1.0) - 0.25) / (n + 0.5))
for its in range(maxit):
p1 = np.ones_like(z)
p2 = np.zeros_like(z)
for j in range(1, n+1):
p3 = p2
p2 = p1
p1 = ((2 * j - 1) * z * p2 - (j - 1) * p3) / j
pp = n * (z * p1 - p2)/(z * z - 1.0)
z1 = z.copy()
z = z1 - p1/pp
if np.all(np.abs(z - z1) < 1e-14):
break
if its == maxit - 1:
raise ValueError("Maximum iterations in _qnwlege1")
nodes[i] = xm - xl * z
nodes[- i - 1] = xm + xl * z
weights[i] = 2 * xl / ((1 - z * z) * pp * pp)
weights[- i - 1] = weights[i]
return nodes, weights | def function[_qnwlege1, parameter[n, a, b]]:
constant[
Compute univariate Guass-Legendre quadrature nodes and weights
Parameters
----------
n : int
The number of nodes
a : int
The lower endpoint
b : int
The upper endpoint
Returns
-------
nodes : np.ndarray(dtype=float)
An n element array of nodes
nodes : np.ndarray(dtype=float)
An n element array of weights
Notes
-----
Based of original function ``qnwlege1`` in CompEcon toolbox by
Miranda and Fackler
References
----------
Miranda, Mario J, and Paul L Fackler. Applied Computational
Economics and Finance, MIT Press, 2002.
]
variable[maxit] assign[=] constant[100]
variable[m] assign[=] call[name[int], parameter[call[name[fix], parameter[binary_operation[binary_operation[name[n] + constant[1]] / constant[2.0]]]]]]
variable[xm] assign[=] binary_operation[constant[0.5] * binary_operation[name[b] + name[a]]]
variable[xl] assign[=] binary_operation[constant[0.5] * binary_operation[name[b] - name[a]]]
variable[nodes] assign[=] call[name[np].zeros, parameter[name[n]]]
variable[weights] assign[=] call[name[nodes].copy, parameter[]]
variable[i] assign[=] call[name[np].arange, parameter[name[m]]]
variable[z] assign[=] call[name[np].cos, parameter[binary_operation[binary_operation[name[np].pi * binary_operation[binary_operation[name[i] + constant[1.0]] - constant[0.25]]] / binary_operation[name[n] + constant[0.5]]]]]
for taget[name[its]] in starred[call[name[range], parameter[name[maxit]]]] begin[:]
variable[p1] assign[=] call[name[np].ones_like, parameter[name[z]]]
variable[p2] assign[=] call[name[np].zeros_like, parameter[name[z]]]
for taget[name[j]] in starred[call[name[range], parameter[constant[1], binary_operation[name[n] + constant[1]]]]] begin[:]
variable[p3] assign[=] name[p2]
variable[p2] assign[=] name[p1]
variable[p1] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[j]] - constant[1]] * name[z]] * name[p2]] - binary_operation[binary_operation[name[j] - constant[1]] * name[p3]]] / name[j]]
variable[pp] assign[=] binary_operation[binary_operation[name[n] * binary_operation[binary_operation[name[z] * name[p1]] - name[p2]]] / binary_operation[binary_operation[name[z] * name[z]] - constant[1.0]]]
variable[z1] assign[=] call[name[z].copy, parameter[]]
variable[z] assign[=] binary_operation[name[z1] - binary_operation[name[p1] / name[pp]]]
if call[name[np].all, parameter[compare[call[name[np].abs, parameter[binary_operation[name[z] - name[z1]]]] less[<] constant[1e-14]]]] begin[:]
break
if compare[name[its] equal[==] binary_operation[name[maxit] - constant[1]]] begin[:]
<ast.Raise object at 0x7da1b1cb02e0>
call[name[nodes]][name[i]] assign[=] binary_operation[name[xm] - binary_operation[name[xl] * name[z]]]
call[name[nodes]][binary_operation[<ast.UnaryOp object at 0x7da1b1cb29e0> - constant[1]]] assign[=] binary_operation[name[xm] + binary_operation[name[xl] * name[z]]]
call[name[weights]][name[i]] assign[=] binary_operation[binary_operation[constant[2] * name[xl]] / binary_operation[binary_operation[binary_operation[constant[1] - binary_operation[name[z] * name[z]]] * name[pp]] * name[pp]]]
call[name[weights]][binary_operation[<ast.UnaryOp object at 0x7da1b1cb34c0> - constant[1]]] assign[=] call[name[weights]][name[i]]
return[tuple[[<ast.Name object at 0x7da1b1cb0c10>, <ast.Name object at 0x7da1b1cb1180>]]] | keyword[def] identifier[_qnwlege1] ( identifier[n] , identifier[a] , identifier[b] ):
literal[string]
identifier[maxit] = literal[int]
identifier[m] = identifier[int] ( identifier[fix] (( identifier[n] + literal[int] )/ literal[int] ))
identifier[xm] = literal[int] *( identifier[b] + identifier[a] )
identifier[xl] = literal[int] *( identifier[b] - identifier[a] )
identifier[nodes] = identifier[np] . identifier[zeros] ( identifier[n] )
identifier[weights] = identifier[nodes] . identifier[copy] ()
identifier[i] = identifier[np] . identifier[arange] ( identifier[m] )
identifier[z] = identifier[np] . identifier[cos] ( identifier[np] . identifier[pi] *(( identifier[i] + literal[int] )- literal[int] )/( identifier[n] + literal[int] ))
keyword[for] identifier[its] keyword[in] identifier[range] ( identifier[maxit] ):
identifier[p1] = identifier[np] . identifier[ones_like] ( identifier[z] )
identifier[p2] = identifier[np] . identifier[zeros_like] ( identifier[z] )
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[n] + literal[int] ):
identifier[p3] = identifier[p2]
identifier[p2] = identifier[p1]
identifier[p1] =(( literal[int] * identifier[j] - literal[int] )* identifier[z] * identifier[p2] -( identifier[j] - literal[int] )* identifier[p3] )/ identifier[j]
identifier[pp] = identifier[n] *( identifier[z] * identifier[p1] - identifier[p2] )/( identifier[z] * identifier[z] - literal[int] )
identifier[z1] = identifier[z] . identifier[copy] ()
identifier[z] = identifier[z1] - identifier[p1] / identifier[pp]
keyword[if] identifier[np] . identifier[all] ( identifier[np] . identifier[abs] ( identifier[z] - identifier[z1] )< literal[int] ):
keyword[break]
keyword[if] identifier[its] == identifier[maxit] - literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[nodes] [ identifier[i] ]= identifier[xm] - identifier[xl] * identifier[z]
identifier[nodes] [- identifier[i] - literal[int] ]= identifier[xm] + identifier[xl] * identifier[z]
identifier[weights] [ identifier[i] ]= literal[int] * identifier[xl] /(( literal[int] - identifier[z] * identifier[z] )* identifier[pp] * identifier[pp] )
identifier[weights] [- identifier[i] - literal[int] ]= identifier[weights] [ identifier[i] ]
keyword[return] identifier[nodes] , identifier[weights] | def _qnwlege1(n, a, b):
"""
Compute univariate Guass-Legendre quadrature nodes and weights
Parameters
----------
n : int
The number of nodes
a : int
The lower endpoint
b : int
The upper endpoint
Returns
-------
nodes : np.ndarray(dtype=float)
An n element array of nodes
nodes : np.ndarray(dtype=float)
An n element array of weights
Notes
-----
Based of original function ``qnwlege1`` in CompEcon toolbox by
Miranda and Fackler
References
----------
Miranda, Mario J, and Paul L Fackler. Applied Computational
Economics and Finance, MIT Press, 2002.
"""
# import ipdb; ipdb.set_trace()
maxit = 100
m = int(fix((n + 1) / 2.0))
xm = 0.5 * (b + a)
xl = 0.5 * (b - a)
nodes = np.zeros(n)
weights = nodes.copy()
i = np.arange(m)
z = np.cos(np.pi * (i + 1.0 - 0.25) / (n + 0.5))
for its in range(maxit):
p1 = np.ones_like(z)
p2 = np.zeros_like(z)
for j in range(1, n + 1):
p3 = p2
p2 = p1
p1 = ((2 * j - 1) * z * p2 - (j - 1) * p3) / j # depends on [control=['for'], data=['j']]
pp = n * (z * p1 - p2) / (z * z - 1.0)
z1 = z.copy()
z = z1 - p1 / pp
if np.all(np.abs(z - z1) < 1e-14):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if its == maxit - 1:
raise ValueError('Maximum iterations in _qnwlege1') # depends on [control=['if'], data=[]]
nodes[i] = xm - xl * z
nodes[-i - 1] = xm + xl * z
weights[i] = 2 * xl / ((1 - z * z) * pp * pp)
weights[-i - 1] = weights[i]
return (nodes, weights) |
def _append_distances(self, v, distance, candidates):
""" Apply distance implementation if specified """
if distance:
# Normalize vector (stored vectors are normalized)
nv = unitvec(v)
candidates = [(x[0], x[1], self.distance.distance(x[0], nv)) for x
in candidates]
return candidates | def function[_append_distances, parameter[self, v, distance, candidates]]:
constant[ Apply distance implementation if specified ]
if name[distance] begin[:]
variable[nv] assign[=] call[name[unitvec], parameter[name[v]]]
variable[candidates] assign[=] <ast.ListComp object at 0x7da1b088a140>
return[name[candidates]] | keyword[def] identifier[_append_distances] ( identifier[self] , identifier[v] , identifier[distance] , identifier[candidates] ):
literal[string]
keyword[if] identifier[distance] :
identifier[nv] = identifier[unitvec] ( identifier[v] )
identifier[candidates] =[( identifier[x] [ literal[int] ], identifier[x] [ literal[int] ], identifier[self] . identifier[distance] . identifier[distance] ( identifier[x] [ literal[int] ], identifier[nv] )) keyword[for] identifier[x]
keyword[in] identifier[candidates] ]
keyword[return] identifier[candidates] | def _append_distances(self, v, distance, candidates):
""" Apply distance implementation if specified """
if distance:
# Normalize vector (stored vectors are normalized)
nv = unitvec(v)
candidates = [(x[0], x[1], self.distance.distance(x[0], nv)) for x in candidates] # depends on [control=['if'], data=[]]
return candidates |
def calibrate_percentile_ranks(allele, predictor, peptides=None):
"""
Private helper function.
"""
global GLOBAL_DATA
if peptides is None:
peptides = GLOBAL_DATA["calibration_peptides"]
predictor.calibrate_percentile_ranks(
peptides=peptides,
alleles=[allele])
return {
allele: predictor.allele_to_percent_rank_transform[allele],
} | def function[calibrate_percentile_ranks, parameter[allele, predictor, peptides]]:
constant[
Private helper function.
]
<ast.Global object at 0x7da1b124d150>
if compare[name[peptides] is constant[None]] begin[:]
variable[peptides] assign[=] call[name[GLOBAL_DATA]][constant[calibration_peptides]]
call[name[predictor].calibrate_percentile_ranks, parameter[]]
return[dictionary[[<ast.Name object at 0x7da1b12b40a0>], [<ast.Subscript object at 0x7da1b12b43a0>]]] | keyword[def] identifier[calibrate_percentile_ranks] ( identifier[allele] , identifier[predictor] , identifier[peptides] = keyword[None] ):
literal[string]
keyword[global] identifier[GLOBAL_DATA]
keyword[if] identifier[peptides] keyword[is] keyword[None] :
identifier[peptides] = identifier[GLOBAL_DATA] [ literal[string] ]
identifier[predictor] . identifier[calibrate_percentile_ranks] (
identifier[peptides] = identifier[peptides] ,
identifier[alleles] =[ identifier[allele] ])
keyword[return] {
identifier[allele] : identifier[predictor] . identifier[allele_to_percent_rank_transform] [ identifier[allele] ],
} | def calibrate_percentile_ranks(allele, predictor, peptides=None):
"""
Private helper function.
"""
global GLOBAL_DATA
if peptides is None:
peptides = GLOBAL_DATA['calibration_peptides'] # depends on [control=['if'], data=['peptides']]
predictor.calibrate_percentile_ranks(peptides=peptides, alleles=[allele])
return {allele: predictor.allele_to_percent_rank_transform[allele]} |
def _run(method, cmd, cwd=None, shell=True, universal_newlines=True,
stderr=STDOUT):
"""Internal wrapper for `call` amd `check_output`"""
if not cmd:
error_msg = 'Passed empty text or list'
raise AttributeError(error_msg)
if isinstance(cmd, six.string_types):
cmd = str(cmd)
if shell:
if isinstance(cmd, list):
cmd = ' '.join(cmd)
else:
if isinstance(cmd, str):
cmd = cmd.strip().split()
out = method(cmd, shell=shell, cwd=cwd, stderr=stderr,
universal_newlines=universal_newlines)
if isinstance(out, bytes):
out = out.decode('utf-8')
return str(out).strip() | def function[_run, parameter[method, cmd, cwd, shell, universal_newlines, stderr]]:
constant[Internal wrapper for `call` amd `check_output`]
if <ast.UnaryOp object at 0x7da1b19ee320> begin[:]
variable[error_msg] assign[=] constant[Passed empty text or list]
<ast.Raise object at 0x7da1b194c9a0>
if call[name[isinstance], parameter[name[cmd], name[six].string_types]] begin[:]
variable[cmd] assign[=] call[name[str], parameter[name[cmd]]]
if name[shell] begin[:]
if call[name[isinstance], parameter[name[cmd], name[list]]] begin[:]
variable[cmd] assign[=] call[constant[ ].join, parameter[name[cmd]]]
variable[out] assign[=] call[name[method], parameter[name[cmd]]]
if call[name[isinstance], parameter[name[out], name[bytes]]] begin[:]
variable[out] assign[=] call[name[out].decode, parameter[constant[utf-8]]]
return[call[call[name[str], parameter[name[out]]].strip, parameter[]]] | keyword[def] identifier[_run] ( identifier[method] , identifier[cmd] , identifier[cwd] = keyword[None] , identifier[shell] = keyword[True] , identifier[universal_newlines] = keyword[True] ,
identifier[stderr] = identifier[STDOUT] ):
literal[string]
keyword[if] keyword[not] identifier[cmd] :
identifier[error_msg] = literal[string]
keyword[raise] identifier[AttributeError] ( identifier[error_msg] )
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[six] . identifier[string_types] ):
identifier[cmd] = identifier[str] ( identifier[cmd] )
keyword[if] identifier[shell] :
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[list] ):
identifier[cmd] = literal[string] . identifier[join] ( identifier[cmd] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[str] ):
identifier[cmd] = identifier[cmd] . identifier[strip] (). identifier[split] ()
identifier[out] = identifier[method] ( identifier[cmd] , identifier[shell] = identifier[shell] , identifier[cwd] = identifier[cwd] , identifier[stderr] = identifier[stderr] ,
identifier[universal_newlines] = identifier[universal_newlines] )
keyword[if] identifier[isinstance] ( identifier[out] , identifier[bytes] ):
identifier[out] = identifier[out] . identifier[decode] ( literal[string] )
keyword[return] identifier[str] ( identifier[out] ). identifier[strip] () | def _run(method, cmd, cwd=None, shell=True, universal_newlines=True, stderr=STDOUT):
"""Internal wrapper for `call` amd `check_output`"""
if not cmd:
error_msg = 'Passed empty text or list'
raise AttributeError(error_msg) # depends on [control=['if'], data=[]]
if isinstance(cmd, six.string_types):
cmd = str(cmd) # depends on [control=['if'], data=[]]
if shell:
if isinstance(cmd, list):
cmd = ' '.join(cmd) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(cmd, str):
cmd = cmd.strip().split() # depends on [control=['if'], data=[]]
out = method(cmd, shell=shell, cwd=cwd, stderr=stderr, universal_newlines=universal_newlines)
if isinstance(out, bytes):
out = out.decode('utf-8') # depends on [control=['if'], data=[]]
return str(out).strip() |
def _reindex_with_indexers(self, reindexers, fill_value=None, copy=False,
allow_dups=False):
"""allow_dups indicates an internal call here """
# reindex doing multiple operations on different axes if indicated
new_data = self._data
for axis in sorted(reindexers.keys()):
index, indexer = reindexers[axis]
baxis = self._get_block_manager_axis(axis)
if index is None:
continue
index = ensure_index(index)
if indexer is not None:
indexer = ensure_int64(indexer)
# TODO: speed up on homogeneous DataFrame objects
new_data = new_data.reindex_indexer(index, indexer, axis=baxis,
fill_value=fill_value,
allow_dups=allow_dups,
copy=copy)
if copy and new_data is self._data:
new_data = new_data.copy()
return self._constructor(new_data).__finalize__(self) | def function[_reindex_with_indexers, parameter[self, reindexers, fill_value, copy, allow_dups]]:
constant[allow_dups indicates an internal call here ]
variable[new_data] assign[=] name[self]._data
for taget[name[axis]] in starred[call[name[sorted], parameter[call[name[reindexers].keys, parameter[]]]]] begin[:]
<ast.Tuple object at 0x7da20e9b35e0> assign[=] call[name[reindexers]][name[axis]]
variable[baxis] assign[=] call[name[self]._get_block_manager_axis, parameter[name[axis]]]
if compare[name[index] is constant[None]] begin[:]
continue
variable[index] assign[=] call[name[ensure_index], parameter[name[index]]]
if compare[name[indexer] is_not constant[None]] begin[:]
variable[indexer] assign[=] call[name[ensure_int64], parameter[name[indexer]]]
variable[new_data] assign[=] call[name[new_data].reindex_indexer, parameter[name[index], name[indexer]]]
if <ast.BoolOp object at 0x7da20e9b1300> begin[:]
variable[new_data] assign[=] call[name[new_data].copy, parameter[]]
return[call[call[name[self]._constructor, parameter[name[new_data]]].__finalize__, parameter[name[self]]]] | keyword[def] identifier[_reindex_with_indexers] ( identifier[self] , identifier[reindexers] , identifier[fill_value] = keyword[None] , identifier[copy] = keyword[False] ,
identifier[allow_dups] = keyword[False] ):
literal[string]
identifier[new_data] = identifier[self] . identifier[_data]
keyword[for] identifier[axis] keyword[in] identifier[sorted] ( identifier[reindexers] . identifier[keys] ()):
identifier[index] , identifier[indexer] = identifier[reindexers] [ identifier[axis] ]
identifier[baxis] = identifier[self] . identifier[_get_block_manager_axis] ( identifier[axis] )
keyword[if] identifier[index] keyword[is] keyword[None] :
keyword[continue]
identifier[index] = identifier[ensure_index] ( identifier[index] )
keyword[if] identifier[indexer] keyword[is] keyword[not] keyword[None] :
identifier[indexer] = identifier[ensure_int64] ( identifier[indexer] )
identifier[new_data] = identifier[new_data] . identifier[reindex_indexer] ( identifier[index] , identifier[indexer] , identifier[axis] = identifier[baxis] ,
identifier[fill_value] = identifier[fill_value] ,
identifier[allow_dups] = identifier[allow_dups] ,
identifier[copy] = identifier[copy] )
keyword[if] identifier[copy] keyword[and] identifier[new_data] keyword[is] identifier[self] . identifier[_data] :
identifier[new_data] = identifier[new_data] . identifier[copy] ()
keyword[return] identifier[self] . identifier[_constructor] ( identifier[new_data] ). identifier[__finalize__] ( identifier[self] ) | def _reindex_with_indexers(self, reindexers, fill_value=None, copy=False, allow_dups=False):
"""allow_dups indicates an internal call here """
# reindex doing multiple operations on different axes if indicated
new_data = self._data
for axis in sorted(reindexers.keys()):
(index, indexer) = reindexers[axis]
baxis = self._get_block_manager_axis(axis)
if index is None:
continue # depends on [control=['if'], data=[]]
index = ensure_index(index)
if indexer is not None:
indexer = ensure_int64(indexer) # depends on [control=['if'], data=['indexer']]
# TODO: speed up on homogeneous DataFrame objects
new_data = new_data.reindex_indexer(index, indexer, axis=baxis, fill_value=fill_value, allow_dups=allow_dups, copy=copy) # depends on [control=['for'], data=['axis']]
if copy and new_data is self._data:
new_data = new_data.copy() # depends on [control=['if'], data=[]]
return self._constructor(new_data).__finalize__(self) |
def clean(self):
"""
Clean up the stale connections in all of the pools, and then
get rid of empty pools. Pools clean themselves every time a
connection is fetched; this cleaning takes care of pools that
aren't being used any more, so nothing is being gotten from
them.
"""
with self.mutex:
now = time.time()
if self.last_clean_time + self.CLEAN_INTERVAL < now:
to_remove = []
for (host, pool) in self.host_to_pool.items():
pool.clean()
if pool.size() == 0:
to_remove.append(host)
for host in to_remove:
del self.host_to_pool[host]
self.last_clean_time = now | def function[clean, parameter[self]]:
constant[
Clean up the stale connections in all of the pools, and then
get rid of empty pools. Pools clean themselves every time a
connection is fetched; this cleaning takes care of pools that
aren't being used any more, so nothing is being gotten from
them.
]
with name[self].mutex begin[:]
variable[now] assign[=] call[name[time].time, parameter[]]
if compare[binary_operation[name[self].last_clean_time + name[self].CLEAN_INTERVAL] less[<] name[now]] begin[:]
variable[to_remove] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b265b0a0>, <ast.Name object at 0x7da1b265a4a0>]]] in starred[call[name[self].host_to_pool.items, parameter[]]] begin[:]
call[name[pool].clean, parameter[]]
if compare[call[name[pool].size, parameter[]] equal[==] constant[0]] begin[:]
call[name[to_remove].append, parameter[name[host]]]
for taget[name[host]] in starred[name[to_remove]] begin[:]
<ast.Delete object at 0x7da1b2658dc0>
name[self].last_clean_time assign[=] name[now] | keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[mutex] :
identifier[now] = identifier[time] . identifier[time] ()
keyword[if] identifier[self] . identifier[last_clean_time] + identifier[self] . identifier[CLEAN_INTERVAL] < identifier[now] :
identifier[to_remove] =[]
keyword[for] ( identifier[host] , identifier[pool] ) keyword[in] identifier[self] . identifier[host_to_pool] . identifier[items] ():
identifier[pool] . identifier[clean] ()
keyword[if] identifier[pool] . identifier[size] ()== literal[int] :
identifier[to_remove] . identifier[append] ( identifier[host] )
keyword[for] identifier[host] keyword[in] identifier[to_remove] :
keyword[del] identifier[self] . identifier[host_to_pool] [ identifier[host] ]
identifier[self] . identifier[last_clean_time] = identifier[now] | def clean(self):
"""
Clean up the stale connections in all of the pools, and then
get rid of empty pools. Pools clean themselves every time a
connection is fetched; this cleaning takes care of pools that
aren't being used any more, so nothing is being gotten from
them.
"""
with self.mutex:
now = time.time()
if self.last_clean_time + self.CLEAN_INTERVAL < now:
to_remove = []
for (host, pool) in self.host_to_pool.items():
pool.clean()
if pool.size() == 0:
to_remove.append(host) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for host in to_remove:
del self.host_to_pool[host] # depends on [control=['for'], data=['host']]
self.last_clean_time = now # depends on [control=['if'], data=['now']] # depends on [control=['with'], data=[]] |
def img_search_google(album):
'''
google image search
'''
album = album + " Album Art"
url = ("https://www.google.com/search?q=" +
quote(album.encode('utf-8')) + "&source=lnms&tbm=isch")
header = {'User-Agent':
'''Mozilla/5.0 (Windows NT 6.1; WOW64)
AppleWebKit/537.36 (KHTML,like Gecko)
Chrome/43.0.2357.134 Safari/537.36'''
}
soup = BeautifulSoup(urlopen(Request(url, headers=header)), "html.parser")
albumart_div = soup.find("div", {"class": "rg_meta"})
albumart = json.loads(albumart_div.text)["ou"]
return albumart | def function[img_search_google, parameter[album]]:
constant[
google image search
]
variable[album] assign[=] binary_operation[name[album] + constant[ Album Art]]
variable[url] assign[=] binary_operation[binary_operation[constant[https://www.google.com/search?q=] + call[name[quote], parameter[call[name[album].encode, parameter[constant[utf-8]]]]]] + constant[&source=lnms&tbm=isch]]
variable[header] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b0d00>], [<ast.Constant object at 0x7da20e9b2440>]]
variable[soup] assign[=] call[name[BeautifulSoup], parameter[call[name[urlopen], parameter[call[name[Request], parameter[name[url]]]]], constant[html.parser]]]
variable[albumart_div] assign[=] call[name[soup].find, parameter[constant[div], dictionary[[<ast.Constant object at 0x7da20e9b2980>], [<ast.Constant object at 0x7da20e9b20b0>]]]]
variable[albumart] assign[=] call[call[name[json].loads, parameter[name[albumart_div].text]]][constant[ou]]
return[name[albumart]] | keyword[def] identifier[img_search_google] ( identifier[album] ):
literal[string]
identifier[album] = identifier[album] + literal[string]
identifier[url] =( literal[string] +
identifier[quote] ( identifier[album] . identifier[encode] ( literal[string] ))+ literal[string] )
identifier[header] ={ literal[string] :
literal[string]
}
identifier[soup] = identifier[BeautifulSoup] ( identifier[urlopen] ( identifier[Request] ( identifier[url] , identifier[headers] = identifier[header] )), literal[string] )
identifier[albumart_div] = identifier[soup] . identifier[find] ( literal[string] ,{ literal[string] : literal[string] })
identifier[albumart] = identifier[json] . identifier[loads] ( identifier[albumart_div] . identifier[text] )[ literal[string] ]
keyword[return] identifier[albumart] | def img_search_google(album):
"""
google image search
"""
album = album + ' Album Art'
url = 'https://www.google.com/search?q=' + quote(album.encode('utf-8')) + '&source=lnms&tbm=isch'
header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64)\n AppleWebKit/537.36 (KHTML,like Gecko)\n Chrome/43.0.2357.134 Safari/537.36'}
soup = BeautifulSoup(urlopen(Request(url, headers=header)), 'html.parser')
albumart_div = soup.find('div', {'class': 'rg_meta'})
albumart = json.loads(albumart_div.text)['ou']
return albumart |
def create_index(self, columns, table_name, if_not_exists = True, unique = False, **kwargs):
'Create a unique index on the column(s) passed.'
index_name = simplify(table_name) + u'_' + u'_'.join(map(simplify, columns))
if unique:
sql = u'CREATE UNIQUE INDEX %s ON %s (%s)'
else:
sql = u'CREATE INDEX %s ON %s (%s)'
first_param = u'IF NOT EXISTS ' + index_name if if_not_exists else index_name
params = (first_param, quote(table_name), ','.join(map(quote, columns)))
self.execute(sql % params, **kwargs) | def function[create_index, parameter[self, columns, table_name, if_not_exists, unique]]:
constant[Create a unique index on the column(s) passed.]
variable[index_name] assign[=] binary_operation[binary_operation[call[name[simplify], parameter[name[table_name]]] + constant[_]] + call[constant[_].join, parameter[call[name[map], parameter[name[simplify], name[columns]]]]]]
if name[unique] begin[:]
variable[sql] assign[=] constant[CREATE UNIQUE INDEX %s ON %s (%s)]
variable[first_param] assign[=] <ast.IfExp object at 0x7da20c993790>
variable[params] assign[=] tuple[[<ast.Name object at 0x7da20c9914b0>, <ast.Call object at 0x7da20c990910>, <ast.Call object at 0x7da20c991c30>]]
call[name[self].execute, parameter[binary_operation[name[sql] <ast.Mod object at 0x7da2590d6920> name[params]]]] | keyword[def] identifier[create_index] ( identifier[self] , identifier[columns] , identifier[table_name] , identifier[if_not_exists] = keyword[True] , identifier[unique] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[index_name] = identifier[simplify] ( identifier[table_name] )+ literal[string] + literal[string] . identifier[join] ( identifier[map] ( identifier[simplify] , identifier[columns] ))
keyword[if] identifier[unique] :
identifier[sql] = literal[string]
keyword[else] :
identifier[sql] = literal[string]
identifier[first_param] = literal[string] + identifier[index_name] keyword[if] identifier[if_not_exists] keyword[else] identifier[index_name]
identifier[params] =( identifier[first_param] , identifier[quote] ( identifier[table_name] ), literal[string] . identifier[join] ( identifier[map] ( identifier[quote] , identifier[columns] )))
identifier[self] . identifier[execute] ( identifier[sql] % identifier[params] ,** identifier[kwargs] ) | def create_index(self, columns, table_name, if_not_exists=True, unique=False, **kwargs):
"""Create a unique index on the column(s) passed."""
index_name = simplify(table_name) + u'_' + u'_'.join(map(simplify, columns))
if unique:
sql = u'CREATE UNIQUE INDEX %s ON %s (%s)' # depends on [control=['if'], data=[]]
else:
sql = u'CREATE INDEX %s ON %s (%s)'
first_param = u'IF NOT EXISTS ' + index_name if if_not_exists else index_name
params = (first_param, quote(table_name), ','.join(map(quote, columns)))
self.execute(sql % params, **kwargs) |
def format_coord(self, x, y):
"""Format displayed coordinates during mouseover of axes."""
p, b = stereonet_math.geographic2plunge_bearing(x, y)
s, d = stereonet_math.geographic2pole(x, y)
pb = u'P/B={:0.0f}\u00b0/{:03.0f}\u00b0'.format(p[0], b[0])
sd = u'S/D={:03.0f}\u00b0/{:0.0f}\u00b0'.format(s[0], d[0])
return u'{}, {}'.format(pb, sd) | def function[format_coord, parameter[self, x, y]]:
constant[Format displayed coordinates during mouseover of axes.]
<ast.Tuple object at 0x7da1b184a110> assign[=] call[name[stereonet_math].geographic2plunge_bearing, parameter[name[x], name[y]]]
<ast.Tuple object at 0x7da1b184b670> assign[=] call[name[stereonet_math].geographic2pole, parameter[name[x], name[y]]]
variable[pb] assign[=] call[constant[P/B={:0.0f}°/{:03.0f}°].format, parameter[call[name[p]][constant[0]], call[name[b]][constant[0]]]]
variable[sd] assign[=] call[constant[S/D={:03.0f}°/{:0.0f}°].format, parameter[call[name[s]][constant[0]], call[name[d]][constant[0]]]]
return[call[constant[{}, {}].format, parameter[name[pb], name[sd]]]] | keyword[def] identifier[format_coord] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
identifier[p] , identifier[b] = identifier[stereonet_math] . identifier[geographic2plunge_bearing] ( identifier[x] , identifier[y] )
identifier[s] , identifier[d] = identifier[stereonet_math] . identifier[geographic2pole] ( identifier[x] , identifier[y] )
identifier[pb] = literal[string] . identifier[format] ( identifier[p] [ literal[int] ], identifier[b] [ literal[int] ])
identifier[sd] = literal[string] . identifier[format] ( identifier[s] [ literal[int] ], identifier[d] [ literal[int] ])
keyword[return] literal[string] . identifier[format] ( identifier[pb] , identifier[sd] ) | def format_coord(self, x, y):
"""Format displayed coordinates during mouseover of axes."""
(p, b) = stereonet_math.geographic2plunge_bearing(x, y)
(s, d) = stereonet_math.geographic2pole(x, y)
pb = u'P/B={:0.0f}°/{:03.0f}°'.format(p[0], b[0])
sd = u'S/D={:03.0f}°/{:0.0f}°'.format(s[0], d[0])
return u'{}, {}'.format(pb, sd) |
def h1_mhe(simulated_array, observed_array, replace_nan=None, replace_inf=None,
remove_neg=False, remove_zero=False):
"""Compute the H1 mean error.
.. image:: /pictures/H1.png
.. image:: /pictures/MHE.png
**Range:**
**Notes:**
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarray
An array of observed data from the time series.
replace_nan: float, optional
If given, indicates which value to replace NaN values with in the two arrays. If None, when
a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
of the observed and simulated array are removed before the computation.
replace_inf: float, optional
If given, indicates which value to replace Inf values with in the two arrays. If None, when
an inf value is found at the i-th position in the observed OR simulated array, the i-th
value of the observed and simulated array are removed before the computation.
remove_neg: boolean, optional
If True, when a negative value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
remove_zero: boolean, optional
If true, when a zero value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
Returns
-------
float
The mean H1 error.
Examples
--------
>>> import HydroErr as he
>>> import numpy as np
>>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
>>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
>>> he.h1_mhe(sim, obs)
0.002106551840594386
References
----------
- Tornquist, L., Vartia, P., Vartia, Y.O., 1985. How Should Relative Changes be Measured?
The American Statistician 43-46.
"""
# Treats data
simulated_array, observed_array = treat_values(
simulated_array,
observed_array,
replace_nan=replace_nan,
replace_inf=replace_inf,
remove_neg=remove_neg,
remove_zero=remove_zero
)
h = (simulated_array - observed_array) / observed_array
return np.mean(h) | def function[h1_mhe, parameter[simulated_array, observed_array, replace_nan, replace_inf, remove_neg, remove_zero]]:
constant[Compute the H1 mean error.
.. image:: /pictures/H1.png
.. image:: /pictures/MHE.png
**Range:**
**Notes:**
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarray
An array of observed data from the time series.
replace_nan: float, optional
If given, indicates which value to replace NaN values with in the two arrays. If None, when
a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
of the observed and simulated array are removed before the computation.
replace_inf: float, optional
If given, indicates which value to replace Inf values with in the two arrays. If None, when
an inf value is found at the i-th position in the observed OR simulated array, the i-th
value of the observed and simulated array are removed before the computation.
remove_neg: boolean, optional
If True, when a negative value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
remove_zero: boolean, optional
If true, when a zero value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
Returns
-------
float
The mean H1 error.
Examples
--------
>>> import HydroErr as he
>>> import numpy as np
>>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
>>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
>>> he.h1_mhe(sim, obs)
0.002106551840594386
References
----------
- Tornquist, L., Vartia, P., Vartia, Y.O., 1985. How Should Relative Changes be Measured?
The American Statistician 43-46.
]
<ast.Tuple object at 0x7da1b040e770> assign[=] call[name[treat_values], parameter[name[simulated_array], name[observed_array]]]
variable[h] assign[=] binary_operation[binary_operation[name[simulated_array] - name[observed_array]] / name[observed_array]]
return[call[name[np].mean, parameter[name[h]]]] | keyword[def] identifier[h1_mhe] ( identifier[simulated_array] , identifier[observed_array] , identifier[replace_nan] = keyword[None] , identifier[replace_inf] = keyword[None] ,
identifier[remove_neg] = keyword[False] , identifier[remove_zero] = keyword[False] ):
literal[string]
identifier[simulated_array] , identifier[observed_array] = identifier[treat_values] (
identifier[simulated_array] ,
identifier[observed_array] ,
identifier[replace_nan] = identifier[replace_nan] ,
identifier[replace_inf] = identifier[replace_inf] ,
identifier[remove_neg] = identifier[remove_neg] ,
identifier[remove_zero] = identifier[remove_zero]
)
identifier[h] =( identifier[simulated_array] - identifier[observed_array] )/ identifier[observed_array]
keyword[return] identifier[np] . identifier[mean] ( identifier[h] ) | def h1_mhe(simulated_array, observed_array, replace_nan=None, replace_inf=None, remove_neg=False, remove_zero=False):
"""Compute the H1 mean error.
.. image:: /pictures/H1.png
.. image:: /pictures/MHE.png
**Range:**
**Notes:**
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarray
An array of observed data from the time series.
replace_nan: float, optional
If given, indicates which value to replace NaN values with in the two arrays. If None, when
a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
of the observed and simulated array are removed before the computation.
replace_inf: float, optional
If given, indicates which value to replace Inf values with in the two arrays. If None, when
an inf value is found at the i-th position in the observed OR simulated array, the i-th
value of the observed and simulated array are removed before the computation.
remove_neg: boolean, optional
If True, when a negative value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
remove_zero: boolean, optional
If true, when a zero value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
Returns
-------
float
The mean H1 error.
Examples
--------
>>> import HydroErr as he
>>> import numpy as np
>>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
>>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
>>> he.h1_mhe(sim, obs)
0.002106551840594386
References
----------
- Tornquist, L., Vartia, P., Vartia, Y.O., 1985. How Should Relative Changes be Measured?
The American Statistician 43-46.
"""
# Treats data
(simulated_array, observed_array) = treat_values(simulated_array, observed_array, replace_nan=replace_nan, replace_inf=replace_inf, remove_neg=remove_neg, remove_zero=remove_zero)
h = (simulated_array - observed_array) / observed_array
return np.mean(h) |
def make_request(method, resource, token=None, vault_url=None, get_token_url=False, **args):
'''
Make a request to Vault
'''
if not token or not vault_url:
connection = get_vault_connection()
token, vault_url = connection['token'], connection['url']
if 'verify' not in args:
args['verify'] = connection['verify']
url = "{0}/{1}".format(vault_url, resource)
headers = {'X-Vault-Token': token, 'Content-Type': 'application/json'}
response = requests.request(method, url, headers=headers, **args)
if get_token_url:
return response, token, vault_url
else:
return response | def function[make_request, parameter[method, resource, token, vault_url, get_token_url]]:
constant[
Make a request to Vault
]
if <ast.BoolOp object at 0x7da18ede5120> begin[:]
variable[connection] assign[=] call[name[get_vault_connection], parameter[]]
<ast.Tuple object at 0x7da18ede5a50> assign[=] tuple[[<ast.Subscript object at 0x7da18ede4be0>, <ast.Subscript object at 0x7da18ede75e0>]]
if compare[constant[verify] <ast.NotIn object at 0x7da2590d7190> name[args]] begin[:]
call[name[args]][constant[verify]] assign[=] call[name[connection]][constant[verify]]
variable[url] assign[=] call[constant[{0}/{1}].format, parameter[name[vault_url], name[resource]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da207f018d0>, <ast.Constant object at 0x7da207f00370>], [<ast.Name object at 0x7da207f004c0>, <ast.Constant object at 0x7da207f03160>]]
variable[response] assign[=] call[name[requests].request, parameter[name[method], name[url]]]
if name[get_token_url] begin[:]
return[tuple[[<ast.Name object at 0x7da207f03e50>, <ast.Name object at 0x7da207f016c0>, <ast.Name object at 0x7da207f01f00>]]] | keyword[def] identifier[make_request] ( identifier[method] , identifier[resource] , identifier[token] = keyword[None] , identifier[vault_url] = keyword[None] , identifier[get_token_url] = keyword[False] ,** identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[token] keyword[or] keyword[not] identifier[vault_url] :
identifier[connection] = identifier[get_vault_connection] ()
identifier[token] , identifier[vault_url] = identifier[connection] [ literal[string] ], identifier[connection] [ literal[string] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[args] :
identifier[args] [ literal[string] ]= identifier[connection] [ literal[string] ]
identifier[url] = literal[string] . identifier[format] ( identifier[vault_url] , identifier[resource] )
identifier[headers] ={ literal[string] : identifier[token] , literal[string] : literal[string] }
identifier[response] = identifier[requests] . identifier[request] ( identifier[method] , identifier[url] , identifier[headers] = identifier[headers] ,** identifier[args] )
keyword[if] identifier[get_token_url] :
keyword[return] identifier[response] , identifier[token] , identifier[vault_url]
keyword[else] :
keyword[return] identifier[response] | def make_request(method, resource, token=None, vault_url=None, get_token_url=False, **args):
"""
Make a request to Vault
"""
if not token or not vault_url:
connection = get_vault_connection()
(token, vault_url) = (connection['token'], connection['url'])
if 'verify' not in args:
args['verify'] = connection['verify'] # depends on [control=['if'], data=['args']] # depends on [control=['if'], data=[]]
url = '{0}/{1}'.format(vault_url, resource)
headers = {'X-Vault-Token': token, 'Content-Type': 'application/json'}
response = requests.request(method, url, headers=headers, **args)
if get_token_url:
return (response, token, vault_url) # depends on [control=['if'], data=[]]
else:
return response |
def set_hash_value(self, key, field, value, pipeline=False):
"""Set the value of field in a hash stored at key.
Args:
key (str): key (name) of the hash
field (str): Field within the hash to set
value: Value to set
pipeline (bool): True, start a transaction block. Default false.
"""
# FIXME(BMo): new name for this function -> save_dict_value ?
if pipeline:
self._pipeline.hset(key, field, str(value))
else:
self._db.hset(key, field, str(value)) | def function[set_hash_value, parameter[self, key, field, value, pipeline]]:
constant[Set the value of field in a hash stored at key.
Args:
key (str): key (name) of the hash
field (str): Field within the hash to set
value: Value to set
pipeline (bool): True, start a transaction block. Default false.
]
if name[pipeline] begin[:]
call[name[self]._pipeline.hset, parameter[name[key], name[field], call[name[str], parameter[name[value]]]]] | keyword[def] identifier[set_hash_value] ( identifier[self] , identifier[key] , identifier[field] , identifier[value] , identifier[pipeline] = keyword[False] ):
literal[string]
keyword[if] identifier[pipeline] :
identifier[self] . identifier[_pipeline] . identifier[hset] ( identifier[key] , identifier[field] , identifier[str] ( identifier[value] ))
keyword[else] :
identifier[self] . identifier[_db] . identifier[hset] ( identifier[key] , identifier[field] , identifier[str] ( identifier[value] )) | def set_hash_value(self, key, field, value, pipeline=False):
"""Set the value of field in a hash stored at key.
Args:
key (str): key (name) of the hash
field (str): Field within the hash to set
value: Value to set
pipeline (bool): True, start a transaction block. Default false.
"""
# FIXME(BMo): new name for this function -> save_dict_value ?
if pipeline:
self._pipeline.hset(key, field, str(value)) # depends on [control=['if'], data=[]]
else:
self._db.hset(key, field, str(value)) |
def parse_files(self, req, name, field):
"""Pull a file from the request."""
files = ((k, v) for k, v in req.POST.items() if hasattr(v, "file"))
return core.get_value(MultiDict(files), name, field) | def function[parse_files, parameter[self, req, name, field]]:
constant[Pull a file from the request.]
variable[files] assign[=] <ast.GeneratorExp object at 0x7da1b22e8490>
return[call[name[core].get_value, parameter[call[name[MultiDict], parameter[name[files]]], name[name], name[field]]]] | keyword[def] identifier[parse_files] ( identifier[self] , identifier[req] , identifier[name] , identifier[field] ):
literal[string]
identifier[files] =(( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[req] . identifier[POST] . identifier[items] () keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ))
keyword[return] identifier[core] . identifier[get_value] ( identifier[MultiDict] ( identifier[files] ), identifier[name] , identifier[field] ) | def parse_files(self, req, name, field):
"""Pull a file from the request."""
files = ((k, v) for (k, v) in req.POST.items() if hasattr(v, 'file'))
return core.get_value(MultiDict(files), name, field) |
def process_response(self, req, resp, resource):
""" Post-processing of the response (after routing).
We attempt to follow many of the 'hardening reponse headers'
best practices. These include things like:
HPKP: HTTP Public Key Pinning
HSTS: HTTP Srict Transport Security
X-Content-Type-Options:
prevents mime-sniffing so user generated content
isn't auto determined as something it shouldn't be.
XSS Protection: built in reflective XSS protection
"""
hpkp = goldman.config.HPKP
if hpkp:
resp.set_header(
'Public-Key-Pins',
'{}; includeSubdomains; max-age=31536000'.format(hpkp),
)
resp.set_header(
'Strict-Transport-Security',
'max-age=31536000; includeSubDomains',
)
resp.set_header(
'X-Content-Type-Options',
'nosniff',
)
resp.set_header(
'X-Xss-Protection',
'1; mode=block',
) | def function[process_response, parameter[self, req, resp, resource]]:
constant[ Post-processing of the response (after routing).
We attempt to follow many of the 'hardening reponse headers'
best practices. These include things like:
HPKP: HTTP Public Key Pinning
HSTS: HTTP Srict Transport Security
X-Content-Type-Options:
prevents mime-sniffing so user generated content
isn't auto determined as something it shouldn't be.
XSS Protection: built in reflective XSS protection
]
variable[hpkp] assign[=] name[goldman].config.HPKP
if name[hpkp] begin[:]
call[name[resp].set_header, parameter[constant[Public-Key-Pins], call[constant[{}; includeSubdomains; max-age=31536000].format, parameter[name[hpkp]]]]]
call[name[resp].set_header, parameter[constant[Strict-Transport-Security], constant[max-age=31536000; includeSubDomains]]]
call[name[resp].set_header, parameter[constant[X-Content-Type-Options], constant[nosniff]]]
call[name[resp].set_header, parameter[constant[X-Xss-Protection], constant[1; mode=block]]] | keyword[def] identifier[process_response] ( identifier[self] , identifier[req] , identifier[resp] , identifier[resource] ):
literal[string]
identifier[hpkp] = identifier[goldman] . identifier[config] . identifier[HPKP]
keyword[if] identifier[hpkp] :
identifier[resp] . identifier[set_header] (
literal[string] ,
literal[string] . identifier[format] ( identifier[hpkp] ),
)
identifier[resp] . identifier[set_header] (
literal[string] ,
literal[string] ,
)
identifier[resp] . identifier[set_header] (
literal[string] ,
literal[string] ,
)
identifier[resp] . identifier[set_header] (
literal[string] ,
literal[string] ,
) | def process_response(self, req, resp, resource):
""" Post-processing of the response (after routing).
We attempt to follow many of the 'hardening reponse headers'
best practices. These include things like:
HPKP: HTTP Public Key Pinning
HSTS: HTTP Srict Transport Security
X-Content-Type-Options:
prevents mime-sniffing so user generated content
isn't auto determined as something it shouldn't be.
XSS Protection: built in reflective XSS protection
"""
hpkp = goldman.config.HPKP
if hpkp:
resp.set_header('Public-Key-Pins', '{}; includeSubdomains; max-age=31536000'.format(hpkp)) # depends on [control=['if'], data=[]]
resp.set_header('Strict-Transport-Security', 'max-age=31536000; includeSubDomains')
resp.set_header('X-Content-Type-Options', 'nosniff')
resp.set_header('X-Xss-Protection', '1; mode=block') |
def _drawContents(self, reason=None, initiator=None):
""" Draws the plot contents from the sliced array of the collected repo tree item.
The reason parameter is used to determine if the axes will be reset (the initiator
parameter is ignored). See AbstractInspector.updateContents for their description.
"""
self.slicedArray = self.collector.getSlicedArray()
if not self._hasValidData():
self._clearContents()
raise InvalidDataError("No data available or it does not contain real numbers")
# -- Valid plot data from here on --
# PyQtGraph doesn't handle masked arrays so we convert the masked values to Nans (missing
# data values are replaced by NaNs). The PyQtGraph line plot omits the Nans, which is great.
self.slicedArray.replaceMaskedValueWithNan() # will convert data to float if int
self.plotItem.clear()
# Reset the axes ranges (via the config)
if (reason == UpdateReason.RTI_CHANGED or
reason == UpdateReason.COLLECTOR_COMBO_BOX):
# self.config.yAxisRangeCti.setAutoRangeOn() doesn't work as refreshBlocked is True
# TODO: can refreshBlocked maybe only block the signals to prevent loops?
self.config.xAxisRangeCti.autoRangeCti.data = True
self.config.yAxisRangeCti.autoRangeCti.data = True
self.titleLabel.setText(self.configValue('title').format(**self.collector.rtiInfo))
connected = np.isfinite(self.slicedArray.data)
if is_an_array(self.slicedArray.mask):
connected = np.logical_and(connected, ~self.slicedArray.mask)
else:
connected = np.zeros_like(self.slicedArray.data) if self.slicedArray.mask else connected
plotDataItem = self.config.plotDataItemCti.createPlotDataItem()
plotDataItem.setData(self.slicedArray.data, connect=connected)
self.plotItem.addItem(plotDataItem)
if self.config.probeCti.configValue:
self.probeLabel.setVisible(True)
self.plotItem.addItem(self.crossLineVerShadow, ignoreBounds=True)
self.plotItem.addItem(self.crossLineVertical, ignoreBounds=True)
self.plotItem.addItem(self.probeDataItem, ignoreBounds=True)
self.probeDataItem.setSymbolBrush(QtGui.QBrush(self.config.plotDataItemCti.penColor))
self.probeDataItem.setSymbolSize(10)
else:
self.probeLabel.setVisible(False)
# Update the config tree from the (possibly) new state of the PgLinePlot1d inspector,
# e.g. the axis range may have changed while drawing.
self.config.updateTarget() | def function[_drawContents, parameter[self, reason, initiator]]:
constant[ Draws the plot contents from the sliced array of the collected repo tree item.
The reason parameter is used to determine if the axes will be reset (the initiator
parameter is ignored). See AbstractInspector.updateContents for their description.
]
name[self].slicedArray assign[=] call[name[self].collector.getSlicedArray, parameter[]]
if <ast.UnaryOp object at 0x7da1b04161a0> begin[:]
call[name[self]._clearContents, parameter[]]
<ast.Raise object at 0x7da1b04166b0>
call[name[self].slicedArray.replaceMaskedValueWithNan, parameter[]]
call[name[self].plotItem.clear, parameter[]]
if <ast.BoolOp object at 0x7da1b0417b80> begin[:]
name[self].config.xAxisRangeCti.autoRangeCti.data assign[=] constant[True]
name[self].config.yAxisRangeCti.autoRangeCti.data assign[=] constant[True]
call[name[self].titleLabel.setText, parameter[call[call[name[self].configValue, parameter[constant[title]]].format, parameter[]]]]
variable[connected] assign[=] call[name[np].isfinite, parameter[name[self].slicedArray.data]]
if call[name[is_an_array], parameter[name[self].slicedArray.mask]] begin[:]
variable[connected] assign[=] call[name[np].logical_and, parameter[name[connected], <ast.UnaryOp object at 0x7da1b04165f0>]]
variable[plotDataItem] assign[=] call[name[self].config.plotDataItemCti.createPlotDataItem, parameter[]]
call[name[plotDataItem].setData, parameter[name[self].slicedArray.data]]
call[name[self].plotItem.addItem, parameter[name[plotDataItem]]]
if name[self].config.probeCti.configValue begin[:]
call[name[self].probeLabel.setVisible, parameter[constant[True]]]
call[name[self].plotItem.addItem, parameter[name[self].crossLineVerShadow]]
call[name[self].plotItem.addItem, parameter[name[self].crossLineVertical]]
call[name[self].plotItem.addItem, parameter[name[self].probeDataItem]]
call[name[self].probeDataItem.setSymbolBrush, parameter[call[name[QtGui].QBrush, parameter[name[self].config.plotDataItemCti.penColor]]]]
call[name[self].probeDataItem.setSymbolSize, parameter[constant[10]]]
call[name[self].config.updateTarget, parameter[]] | keyword[def] identifier[_drawContents] ( identifier[self] , identifier[reason] = keyword[None] , identifier[initiator] = keyword[None] ):
literal[string]
identifier[self] . identifier[slicedArray] = identifier[self] . identifier[collector] . identifier[getSlicedArray] ()
keyword[if] keyword[not] identifier[self] . identifier[_hasValidData] ():
identifier[self] . identifier[_clearContents] ()
keyword[raise] identifier[InvalidDataError] ( literal[string] )
identifier[self] . identifier[slicedArray] . identifier[replaceMaskedValueWithNan] ()
identifier[self] . identifier[plotItem] . identifier[clear] ()
keyword[if] ( identifier[reason] == identifier[UpdateReason] . identifier[RTI_CHANGED] keyword[or]
identifier[reason] == identifier[UpdateReason] . identifier[COLLECTOR_COMBO_BOX] ):
identifier[self] . identifier[config] . identifier[xAxisRangeCti] . identifier[autoRangeCti] . identifier[data] = keyword[True]
identifier[self] . identifier[config] . identifier[yAxisRangeCti] . identifier[autoRangeCti] . identifier[data] = keyword[True]
identifier[self] . identifier[titleLabel] . identifier[setText] ( identifier[self] . identifier[configValue] ( literal[string] ). identifier[format] (** identifier[self] . identifier[collector] . identifier[rtiInfo] ))
identifier[connected] = identifier[np] . identifier[isfinite] ( identifier[self] . identifier[slicedArray] . identifier[data] )
keyword[if] identifier[is_an_array] ( identifier[self] . identifier[slicedArray] . identifier[mask] ):
identifier[connected] = identifier[np] . identifier[logical_and] ( identifier[connected] ,~ identifier[self] . identifier[slicedArray] . identifier[mask] )
keyword[else] :
identifier[connected] = identifier[np] . identifier[zeros_like] ( identifier[self] . identifier[slicedArray] . identifier[data] ) keyword[if] identifier[self] . identifier[slicedArray] . identifier[mask] keyword[else] identifier[connected]
identifier[plotDataItem] = identifier[self] . identifier[config] . identifier[plotDataItemCti] . identifier[createPlotDataItem] ()
identifier[plotDataItem] . identifier[setData] ( identifier[self] . identifier[slicedArray] . identifier[data] , identifier[connect] = identifier[connected] )
identifier[self] . identifier[plotItem] . identifier[addItem] ( identifier[plotDataItem] )
keyword[if] identifier[self] . identifier[config] . identifier[probeCti] . identifier[configValue] :
identifier[self] . identifier[probeLabel] . identifier[setVisible] ( keyword[True] )
identifier[self] . identifier[plotItem] . identifier[addItem] ( identifier[self] . identifier[crossLineVerShadow] , identifier[ignoreBounds] = keyword[True] )
identifier[self] . identifier[plotItem] . identifier[addItem] ( identifier[self] . identifier[crossLineVertical] , identifier[ignoreBounds] = keyword[True] )
identifier[self] . identifier[plotItem] . identifier[addItem] ( identifier[self] . identifier[probeDataItem] , identifier[ignoreBounds] = keyword[True] )
identifier[self] . identifier[probeDataItem] . identifier[setSymbolBrush] ( identifier[QtGui] . identifier[QBrush] ( identifier[self] . identifier[config] . identifier[plotDataItemCti] . identifier[penColor] ))
identifier[self] . identifier[probeDataItem] . identifier[setSymbolSize] ( literal[int] )
keyword[else] :
identifier[self] . identifier[probeLabel] . identifier[setVisible] ( keyword[False] )
identifier[self] . identifier[config] . identifier[updateTarget] () | def _drawContents(self, reason=None, initiator=None):
""" Draws the plot contents from the sliced array of the collected repo tree item.
The reason parameter is used to determine if the axes will be reset (the initiator
parameter is ignored). See AbstractInspector.updateContents for their description.
"""
self.slicedArray = self.collector.getSlicedArray()
if not self._hasValidData():
self._clearContents()
raise InvalidDataError('No data available or it does not contain real numbers') # depends on [control=['if'], data=[]]
# -- Valid plot data from here on --
# PyQtGraph doesn't handle masked arrays so we convert the masked values to Nans (missing
# data values are replaced by NaNs). The PyQtGraph line plot omits the Nans, which is great.
self.slicedArray.replaceMaskedValueWithNan() # will convert data to float if int
self.plotItem.clear()
# Reset the axes ranges (via the config)
if reason == UpdateReason.RTI_CHANGED or reason == UpdateReason.COLLECTOR_COMBO_BOX:
# self.config.yAxisRangeCti.setAutoRangeOn() doesn't work as refreshBlocked is True
# TODO: can refreshBlocked maybe only block the signals to prevent loops?
self.config.xAxisRangeCti.autoRangeCti.data = True
self.config.yAxisRangeCti.autoRangeCti.data = True # depends on [control=['if'], data=[]]
self.titleLabel.setText(self.configValue('title').format(**self.collector.rtiInfo))
connected = np.isfinite(self.slicedArray.data)
if is_an_array(self.slicedArray.mask):
connected = np.logical_and(connected, ~self.slicedArray.mask) # depends on [control=['if'], data=[]]
else:
connected = np.zeros_like(self.slicedArray.data) if self.slicedArray.mask else connected
plotDataItem = self.config.plotDataItemCti.createPlotDataItem()
plotDataItem.setData(self.slicedArray.data, connect=connected)
self.plotItem.addItem(plotDataItem)
if self.config.probeCti.configValue:
self.probeLabel.setVisible(True)
self.plotItem.addItem(self.crossLineVerShadow, ignoreBounds=True)
self.plotItem.addItem(self.crossLineVertical, ignoreBounds=True)
self.plotItem.addItem(self.probeDataItem, ignoreBounds=True)
self.probeDataItem.setSymbolBrush(QtGui.QBrush(self.config.plotDataItemCti.penColor))
self.probeDataItem.setSymbolSize(10) # depends on [control=['if'], data=[]]
else:
self.probeLabel.setVisible(False)
# Update the config tree from the (possibly) new state of the PgLinePlot1d inspector,
# e.g. the axis range may have changed while drawing.
self.config.updateTarget() |
def get_wordpress(self, service_id, version_number, name):
"""Get information on a specific wordpress."""
content = self._fetch("/service/%s/version/%d/wordpress/%s" % (service_id, version_number, name))
return FastlyWordpress(self, content) | def function[get_wordpress, parameter[self, service_id, version_number, name]]:
constant[Get information on a specific wordpress.]
variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/service/%s/version/%d/wordpress/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b11e2950>, <ast.Name object at 0x7da1b11e03d0>, <ast.Name object at 0x7da1b11e3610>]]]]]
return[call[name[FastlyWordpress], parameter[name[self], name[content]]]] | keyword[def] identifier[get_wordpress] ( identifier[self] , identifier[service_id] , identifier[version_number] , identifier[name] ):
literal[string]
identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] %( identifier[service_id] , identifier[version_number] , identifier[name] ))
keyword[return] identifier[FastlyWordpress] ( identifier[self] , identifier[content] ) | def get_wordpress(self, service_id, version_number, name):
"""Get information on a specific wordpress."""
content = self._fetch('/service/%s/version/%d/wordpress/%s' % (service_id, version_number, name))
return FastlyWordpress(self, content) |
def down(context, repo_url):
"""(download) Synchronise remote repo to local repo.
If repo_url is given, then clone from remote URL.
"""
if repo_url == '':
context.obj.find_repo_type()
if context.obj.vc_name == 'git':
context.obj.call(['git', 'pull'])
elif context.obj.vc_name == 'hg':
context.obj.call(['hg', 'pull', '-u'])
else:
context.obj.call([context.obj.vc_name, 'clone', repo_url]) | def function[down, parameter[context, repo_url]]:
constant[(download) Synchronise remote repo to local repo.
If repo_url is given, then clone from remote URL.
]
if compare[name[repo_url] equal[==] constant[]] begin[:]
call[name[context].obj.find_repo_type, parameter[]]
if compare[name[context].obj.vc_name equal[==] constant[git]] begin[:]
call[name[context].obj.call, parameter[list[[<ast.Constant object at 0x7da18f720610>, <ast.Constant object at 0x7da18f721900>]]]] | keyword[def] identifier[down] ( identifier[context] , identifier[repo_url] ):
literal[string]
keyword[if] identifier[repo_url] == literal[string] :
identifier[context] . identifier[obj] . identifier[find_repo_type] ()
keyword[if] identifier[context] . identifier[obj] . identifier[vc_name] == literal[string] :
identifier[context] . identifier[obj] . identifier[call] ([ literal[string] , literal[string] ])
keyword[elif] identifier[context] . identifier[obj] . identifier[vc_name] == literal[string] :
identifier[context] . identifier[obj] . identifier[call] ([ literal[string] , literal[string] , literal[string] ])
keyword[else] :
identifier[context] . identifier[obj] . identifier[call] ([ identifier[context] . identifier[obj] . identifier[vc_name] , literal[string] , identifier[repo_url] ]) | def down(context, repo_url):
"""(download) Synchronise remote repo to local repo.
If repo_url is given, then clone from remote URL.
"""
if repo_url == '':
context.obj.find_repo_type()
if context.obj.vc_name == 'git':
context.obj.call(['git', 'pull']) # depends on [control=['if'], data=[]]
elif context.obj.vc_name == 'hg':
context.obj.call(['hg', 'pull', '-u']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
context.obj.call([context.obj.vc_name, 'clone', repo_url]) |
def median_interp(interp_object):
    """
    Find the median of the function represented as an interpolation object.

    The tabulated values are treated as negative log-probabilities: the knot
    grid is refined 10x, ``exp(-(f - f_min))`` is integrated with the
    trapezoid rule, and the refined grid point closest to half the total mass
    is returned (with the index clamped away from the grid edges).
    """
    knots = interp_object.x
    steps = np.diff(knots)
    # Refine each knot interval with 10 evenly spaced sub-points
    # (the right endpoint of the last interval is excluded).
    refined = np.concatenate([knots[:-1] + 0.1 * sub * steps
                              for sub in range(10)])
    grid = np.sort(refined.flatten())
    # Unnormalised density relative to the minimum of the tabulated values.
    density = np.exp(-(interp_object(grid) - interp_object.y.min()))
    # Trapezoid-rule cumulative integral of the density over the refined grid.
    segment_areas = 0.5 * (density[1:] + density[:-1]) * np.diff(grid)
    cumulative = np.cumsum(segment_areas)
    half_mass = cumulative[-1] * 0.5
    idx = np.searchsorted(cumulative, half_mass) + 1
    # Keep the answer at least 2 points from the left edge and 3 from the right.
    idx = min(len(cumulative) - 3, max(2, idx))
    return grid[idx]
constant[
Find the median of the function represented as an interpolation object.
]
variable[new_grid] assign[=] call[name[np].sort, parameter[call[call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da20cabe380>]].flatten, parameter[]]]]
variable[tmp_prop] assign[=] call[name[np].exp, parameter[<ast.UnaryOp object at 0x7da18fe93e20>]]
variable[tmp_cumsum] assign[=] call[name[np].cumsum, parameter[binary_operation[binary_operation[constant[0.5] * binary_operation[call[name[tmp_prop]][<ast.Slice object at 0x7da204345d80>] + call[name[tmp_prop]][<ast.Slice object at 0x7da204346710>]]] * call[name[np].diff, parameter[name[new_grid]]]]]]
variable[median_index] assign[=] call[name[min], parameter[binary_operation[call[name[len], parameter[name[tmp_cumsum]]] - constant[3]], call[name[max], parameter[constant[2], binary_operation[call[name[np].searchsorted, parameter[name[tmp_cumsum], binary_operation[call[name[tmp_cumsum]][<ast.UnaryOp object at 0x7da204347130>] * constant[0.5]]]] + constant[1]]]]]]
return[call[name[new_grid]][name[median_index]]] | keyword[def] identifier[median_interp] ( identifier[interp_object] ):
literal[string]
identifier[new_grid] = identifier[np] . identifier[sort] ( identifier[np] . identifier[concatenate] ([ identifier[interp_object] . identifier[x] [:- literal[int] ]+ literal[int] * identifier[ii] * identifier[np] . identifier[diff] ( identifier[interp_object] . identifier[x] )
keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] )]). identifier[flatten] ())
identifier[tmp_prop] = identifier[np] . identifier[exp] (-( identifier[interp_object] ( identifier[new_grid] )- identifier[interp_object] . identifier[y] . identifier[min] ()))
identifier[tmp_cumsum] = identifier[np] . identifier[cumsum] ( literal[int] *( identifier[tmp_prop] [ literal[int] :]+ identifier[tmp_prop] [:- literal[int] ])* identifier[np] . identifier[diff] ( identifier[new_grid] ))
identifier[median_index] = identifier[min] ( identifier[len] ( identifier[tmp_cumsum] )- literal[int] , identifier[max] ( literal[int] , identifier[np] . identifier[searchsorted] ( identifier[tmp_cumsum] , identifier[tmp_cumsum] [- literal[int] ]* literal[int] )+ literal[int] ))
keyword[return] identifier[new_grid] [ identifier[median_index] ] | def median_interp(interp_object):
"""
Find the median of the function represented as an interpolation object.
"""
new_grid = np.sort(np.concatenate([interp_object.x[:-1] + 0.1 * ii * np.diff(interp_object.x) for ii in range(10)]).flatten())
tmp_prop = np.exp(-(interp_object(new_grid) - interp_object.y.min()))
tmp_cumsum = np.cumsum(0.5 * (tmp_prop[1:] + tmp_prop[:-1]) * np.diff(new_grid))
median_index = min(len(tmp_cumsum) - 3, max(2, np.searchsorted(tmp_cumsum, tmp_cumsum[-1] * 0.5) + 1))
return new_grid[median_index] |
def manual_configure():
    """
    Interactively (re)build the jackal configuration.

    Walks the user through every setting (Elasticsearch host, SSL and client
    certificates, index prefix, nmap output/options, optional nessus and
    named-pipe sections), pre-filling each prompt with the current value from
    ``Config``, and finally persists everything via ``config.write_config``.
    """
    print("Manual configuring jackal")
    # Maps the '1'/'0' strings stored in the config file to the 'y'/'n'
    # answers shown as prompt defaults.
    # NOTE(review): a stored value other than '1'/'0' would raise KeyError
    # here -- confirm Config guarantees that invariant.
    mapping = { '1': 'y', '0': 'n'}
    config = Config()
    # Host
    host = input_with_default("What is the Elasticsearch host?", config.get('jackal', 'host'))
    config.set('jackal', 'host', host)
    # SSL: optionally pin a custom CA certificate for the server.
    if input_with_default("Use SSL?", mapping[config.get('jackal', 'use_ssl')]) == 'y':
        config.set('jackal', 'use_ssl', '1')
        if input_with_default("Setup custom server cert?", 'y') == 'y':
            ca_certs = input_with_default("Server certificate location?", config.get('jackal', 'ca_certs'))
            config.set('jackal', 'ca_certs', ca_certs)
        else:
            # Empty string means "no custom CA bundle".
            config.set('jackal', 'ca_certs', '')
    else:
        config.set('jackal', 'use_ssl', '0')
    # Optional client-side (mutual TLS) certificate and key.
    if input_with_default("Setup client certificates?", mapping[config.get('jackal', 'client_certs')]) == 'y':
        config.set('jackal', 'client_certs', '1')
        client_cert = input_with_default("Client cert location?", config.get('jackal', 'client_cert'))
        config.set('jackal', 'client_cert', client_cert)
        client_key = input_with_default("Client key location?", config.get('jackal', 'client_key'))
        config.set('jackal', 'client_key', client_key)
    else:
        config.set('jackal', 'client_certs', '0')
    # Index
    index = input_with_default("What index prefix should jackal use?", config.get('jackal', 'index'))
    config.set('jackal', 'index', index)
    # Whether write_config should also (re)create the Elasticsearch indices.
    initialize_indices = (input_with_default("Do you want to initialize the indices?", 'y').lower() == 'y')
    # Nmap: output directory is created eagerly if missing.
    nmap_dir = input_with_default("What directory do you want to place the nmap results in?", config.get('nmap', 'directory'))
    if not os.path.exists(nmap_dir):
        os.makedirs(nmap_dir)
    config.set('nmap', 'directory', nmap_dir)
    nmap_options = input_with_default("What nmap options do you want to set for 'custom' (for example '-p 22,445')?", config.get('nmap', 'options'))
    config.set('nmap', 'options', nmap_options)
    # Nessus (optional section).
    configure_nessus = (input_with_default("Do you want to setup nessus?", 'n').lower() == 'y')
    if configure_nessus:
        nessus_host = input_with_default("What is the nessus host?", config.get('nessus', 'host'))
        nessus_template = input_with_default("What template should jackal use?", config.get('nessus', 'template_name'))
        nessus_access = input_with_default("What api access key should jackal use?", config.get('nessus', 'access_key'))
        nessus_secret = input_with_default("What api secret key should jackal use?", config.get('nessus', 'secret_key'))
        config.set('nessus', 'host', nessus_host)
        config.set('nessus', 'template_name', nessus_template)
        config.set('nessus', 'access_key', nessus_access)
        config.set('nessus', 'secret_key', nessus_secret)
    # Named pipes (optional section).
    configure_pipes = (input_with_default("Do you want to setup named pipes?", 'n').lower() == 'y')
    if configure_pipes:
        directory = input_with_default("What directory do you want to place the named pipes in?", config.get('pipes', 'directory'))
        config.set('pipes', 'directory', directory)
        config_file = input_with_default("What is the name of the named pipe config?", config.get('pipes', 'config_file'))
        config.set('pipes', 'config_file', config_file)
        # Offer to create the pipe directory; the (empty) pipe config file is
        # created unconditionally so later reads do not fail.
        if not os.path.exists(directory):
            create = (input_with_default("Do you want to create the directory?", 'n').lower() == 'y')
            if create:
                os.makedirs(directory)
        if not os.path.exists(os.path.join(config.config_dir, config_file)):
            f = open(os.path.join(config.config_dir, config_file), 'a')
            f.close()
    config.write_config(initialize_indices)
constant[
Function to manually configure jackal.
]
call[name[print], parameter[constant[Manual configuring jackal]]]
variable[mapping] assign[=] dictionary[[<ast.Constant object at 0x7da1afe3add0>, <ast.Constant object at 0x7da1afe38640>], [<ast.Constant object at 0x7da1afe391e0>, <ast.Constant object at 0x7da1afe3bee0>]]
variable[config] assign[=] call[name[Config], parameter[]]
variable[host] assign[=] call[name[input_with_default], parameter[constant[What is the Elasticsearch host?], call[name[config].get, parameter[constant[jackal], constant[host]]]]]
call[name[config].set, parameter[constant[jackal], constant[host], name[host]]]
if compare[call[name[input_with_default], parameter[constant[Use SSL?], call[name[mapping]][call[name[config].get, parameter[constant[jackal], constant[use_ssl]]]]]] equal[==] constant[y]] begin[:]
call[name[config].set, parameter[constant[jackal], constant[use_ssl], constant[1]]]
if compare[call[name[input_with_default], parameter[constant[Setup custom server cert?], constant[y]]] equal[==] constant[y]] begin[:]
variable[ca_certs] assign[=] call[name[input_with_default], parameter[constant[Server certificate location?], call[name[config].get, parameter[constant[jackal], constant[ca_certs]]]]]
call[name[config].set, parameter[constant[jackal], constant[ca_certs], name[ca_certs]]]
if compare[call[name[input_with_default], parameter[constant[Setup client certificates?], call[name[mapping]][call[name[config].get, parameter[constant[jackal], constant[client_certs]]]]]] equal[==] constant[y]] begin[:]
call[name[config].set, parameter[constant[jackal], constant[client_certs], constant[1]]]
variable[client_cert] assign[=] call[name[input_with_default], parameter[constant[Client cert location?], call[name[config].get, parameter[constant[jackal], constant[client_cert]]]]]
call[name[config].set, parameter[constant[jackal], constant[client_cert], name[client_cert]]]
variable[client_key] assign[=] call[name[input_with_default], parameter[constant[Client key location?], call[name[config].get, parameter[constant[jackal], constant[client_key]]]]]
call[name[config].set, parameter[constant[jackal], constant[client_key], name[client_key]]]
variable[index] assign[=] call[name[input_with_default], parameter[constant[What index prefix should jackal use?], call[name[config].get, parameter[constant[jackal], constant[index]]]]]
call[name[config].set, parameter[constant[jackal], constant[index], name[index]]]
variable[initialize_indices] assign[=] compare[call[call[name[input_with_default], parameter[constant[Do you want to initialize the indices?], constant[y]]].lower, parameter[]] equal[==] constant[y]]
variable[nmap_dir] assign[=] call[name[input_with_default], parameter[constant[What directory do you want to place the nmap results in?], call[name[config].get, parameter[constant[nmap], constant[directory]]]]]
if <ast.UnaryOp object at 0x7da1b009fca0> begin[:]
call[name[os].makedirs, parameter[name[nmap_dir]]]
call[name[config].set, parameter[constant[nmap], constant[directory], name[nmap_dir]]]
variable[nmap_options] assign[=] call[name[input_with_default], parameter[constant[What nmap options do you want to set for 'custom' (for example '-p 22,445')?], call[name[config].get, parameter[constant[nmap], constant[options]]]]]
call[name[config].set, parameter[constant[nmap], constant[options], name[nmap_options]]]
variable[configure_nessus] assign[=] compare[call[call[name[input_with_default], parameter[constant[Do you want to setup nessus?], constant[n]]].lower, parameter[]] equal[==] constant[y]]
if name[configure_nessus] begin[:]
variable[nessus_host] assign[=] call[name[input_with_default], parameter[constant[What is the nessus host?], call[name[config].get, parameter[constant[nessus], constant[host]]]]]
variable[nessus_template] assign[=] call[name[input_with_default], parameter[constant[What template should jackal use?], call[name[config].get, parameter[constant[nessus], constant[template_name]]]]]
variable[nessus_access] assign[=] call[name[input_with_default], parameter[constant[What api access key should jackal use?], call[name[config].get, parameter[constant[nessus], constant[access_key]]]]]
variable[nessus_secret] assign[=] call[name[input_with_default], parameter[constant[What api secret key should jackal use?], call[name[config].get, parameter[constant[nessus], constant[secret_key]]]]]
call[name[config].set, parameter[constant[nessus], constant[host], name[nessus_host]]]
call[name[config].set, parameter[constant[nessus], constant[template_name], name[nessus_template]]]
call[name[config].set, parameter[constant[nessus], constant[access_key], name[nessus_access]]]
call[name[config].set, parameter[constant[nessus], constant[secret_key], name[nessus_secret]]]
variable[configure_pipes] assign[=] compare[call[call[name[input_with_default], parameter[constant[Do you want to setup named pipes?], constant[n]]].lower, parameter[]] equal[==] constant[y]]
if name[configure_pipes] begin[:]
variable[directory] assign[=] call[name[input_with_default], parameter[constant[What directory do you want to place the named pipes in?], call[name[config].get, parameter[constant[pipes], constant[directory]]]]]
call[name[config].set, parameter[constant[pipes], constant[directory], name[directory]]]
variable[config_file] assign[=] call[name[input_with_default], parameter[constant[What is the name of the named pipe config?], call[name[config].get, parameter[constant[pipes], constant[config_file]]]]]
call[name[config].set, parameter[constant[pipes], constant[config_file], name[config_file]]]
if <ast.UnaryOp object at 0x7da1b009e350> begin[:]
variable[create] assign[=] compare[call[call[name[input_with_default], parameter[constant[Do you want to create the directory?], constant[n]]].lower, parameter[]] equal[==] constant[y]]
if name[create] begin[:]
call[name[os].makedirs, parameter[name[directory]]]
if <ast.UnaryOp object at 0x7da1b00236d0> begin[:]
variable[f] assign[=] call[name[open], parameter[call[name[os].path.join, parameter[name[config].config_dir, name[config_file]]], constant[a]]]
call[name[f].close, parameter[]]
call[name[config].write_config, parameter[name[initialize_indices]]] | keyword[def] identifier[manual_configure] ():
literal[string]
identifier[print] ( literal[string] )
identifier[mapping] ={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[config] = identifier[Config] ()
identifier[host] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[host] )
keyword[if] identifier[input_with_default] ( literal[string] , identifier[mapping] [ identifier[config] . identifier[get] ( literal[string] , literal[string] )])== literal[string] :
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
keyword[if] identifier[input_with_default] ( literal[string] , literal[string] )== literal[string] :
identifier[ca_certs] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[ca_certs] )
keyword[else] :
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
keyword[else] :
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
keyword[if] identifier[input_with_default] ( literal[string] , identifier[mapping] [ identifier[config] . identifier[get] ( literal[string] , literal[string] )])== literal[string] :
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
identifier[client_cert] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[client_cert] )
identifier[client_key] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[client_key] )
keyword[else] :
identifier[config] . identifier[set] ( literal[string] , literal[string] , literal[string] )
identifier[index] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[index] )
identifier[initialize_indices] =( identifier[input_with_default] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] )
identifier[nmap_dir] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[nmap_dir] ):
identifier[os] . identifier[makedirs] ( identifier[nmap_dir] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[nmap_dir] )
identifier[nmap_options] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[nmap_options] )
identifier[configure_nessus] =( identifier[input_with_default] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] )
keyword[if] identifier[configure_nessus] :
identifier[nessus_host] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[nessus_template] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[nessus_access] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[nessus_secret] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[nessus_host] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[nessus_template] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[nessus_access] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[nessus_secret] )
identifier[configure_pipes] =( identifier[input_with_default] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] )
keyword[if] identifier[configure_pipes] :
identifier[directory] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[directory] )
identifier[config_file] = identifier[input_with_default] ( literal[string] , identifier[config] . identifier[get] ( literal[string] , literal[string] ))
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[config_file] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ):
identifier[create] =( identifier[input_with_default] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] )
keyword[if] identifier[create] :
identifier[os] . identifier[makedirs] ( identifier[directory] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[config_dir] , identifier[config_file] )):
identifier[f] = identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[config_dir] , identifier[config_file] ), literal[string] )
identifier[f] . identifier[close] ()
identifier[config] . identifier[write_config] ( identifier[initialize_indices] ) | def manual_configure():
"""
Function to manually configure jackal.
"""
print('Manual configuring jackal')
mapping = {'1': 'y', '0': 'n'}
config = Config()
# Host
host = input_with_default('What is the Elasticsearch host?', config.get('jackal', 'host'))
config.set('jackal', 'host', host)
# SSL
if input_with_default('Use SSL?', mapping[config.get('jackal', 'use_ssl')]) == 'y':
config.set('jackal', 'use_ssl', '1')
if input_with_default('Setup custom server cert?', 'y') == 'y':
ca_certs = input_with_default('Server certificate location?', config.get('jackal', 'ca_certs'))
config.set('jackal', 'ca_certs', ca_certs) # depends on [control=['if'], data=[]]
else:
config.set('jackal', 'ca_certs', '') # depends on [control=['if'], data=[]]
else:
config.set('jackal', 'use_ssl', '0')
if input_with_default('Setup client certificates?', mapping[config.get('jackal', 'client_certs')]) == 'y':
config.set('jackal', 'client_certs', '1')
client_cert = input_with_default('Client cert location?', config.get('jackal', 'client_cert'))
config.set('jackal', 'client_cert', client_cert)
client_key = input_with_default('Client key location?', config.get('jackal', 'client_key'))
config.set('jackal', 'client_key', client_key) # depends on [control=['if'], data=[]]
else:
config.set('jackal', 'client_certs', '0')
# Index
index = input_with_default('What index prefix should jackal use?', config.get('jackal', 'index'))
config.set('jackal', 'index', index)
initialize_indices = input_with_default('Do you want to initialize the indices?', 'y').lower() == 'y'
# Nmap
nmap_dir = input_with_default('What directory do you want to place the nmap results in?', config.get('nmap', 'directory'))
if not os.path.exists(nmap_dir):
os.makedirs(nmap_dir) # depends on [control=['if'], data=[]]
config.set('nmap', 'directory', nmap_dir)
nmap_options = input_with_default("What nmap options do you want to set for 'custom' (for example '-p 22,445')?", config.get('nmap', 'options'))
config.set('nmap', 'options', nmap_options)
# Nessus
configure_nessus = input_with_default('Do you want to setup nessus?', 'n').lower() == 'y'
if configure_nessus:
nessus_host = input_with_default('What is the nessus host?', config.get('nessus', 'host'))
nessus_template = input_with_default('What template should jackal use?', config.get('nessus', 'template_name'))
nessus_access = input_with_default('What api access key should jackal use?', config.get('nessus', 'access_key'))
nessus_secret = input_with_default('What api secret key should jackal use?', config.get('nessus', 'secret_key'))
config.set('nessus', 'host', nessus_host)
config.set('nessus', 'template_name', nessus_template)
config.set('nessus', 'access_key', nessus_access)
config.set('nessus', 'secret_key', nessus_secret) # depends on [control=['if'], data=[]]
# Named pipes
configure_pipes = input_with_default('Do you want to setup named pipes?', 'n').lower() == 'y'
if configure_pipes:
directory = input_with_default('What directory do you want to place the named pipes in?', config.get('pipes', 'directory'))
config.set('pipes', 'directory', directory)
config_file = input_with_default('What is the name of the named pipe config?', config.get('pipes', 'config_file'))
config.set('pipes', 'config_file', config_file)
if not os.path.exists(directory):
create = input_with_default('Do you want to create the directory?', 'n').lower() == 'y'
if create:
os.makedirs(directory) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not os.path.exists(os.path.join(config.config_dir, config_file)):
f = open(os.path.join(config.config_dir, config_file), 'a')
f.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
config.write_config(initialize_indices) |
def get_activity_form_for_create(self, objective_id=None, activity_record_types=None):
    """Gets the activity form for creating new activities.

    A new form should be requested for each create transaction.

    arg: activityRecordTypes (osid.type.Type): array of activity
        record types
    return: (osid.learning.ActivityForm) - the activity form
    raise: NotFound - objectiveId is not found
    raise: NullArgument - objectiveId or activityRecordTypes is
        null
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - unable to get form for requested record
        types
    compliance: mandatory - This method must be implemented.
    """
    if objective_id is None:
        raise NullArgument()
    # TODO: activity_record_types is currently ignored; record-type
    # support still needs to be implemented.
    form = objects.ActivityForm(osid_object_map=None, objective_id=objective_id)
    # Track the form as not-yet-created until the create call consumes it.
    self._forms[form.get_id().get_identifier()] = not CREATED
    return form
constant[Gets the activity form for creating new activities.
A new form should be requested for each create transaction.
arg: activityRecordTypes (osid.type.Type): array of activity
record types
return: (osid.learning.ActivityForm) - the activity form
raise: NotFound - objectiveId is not found
raise: NullArgument - objectiveId or activityRecordTypes is
null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
compliance: mandatory - This method must be implemented.
]
if compare[name[objective_id] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f810520>
if compare[name[activity_record_types] is constant[None]] begin[:]
pass
variable[activity_form] assign[=] call[name[objects].ActivityForm, parameter[]]
call[name[self]._forms][call[call[name[activity_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] <ast.UnaryOp object at 0x7da18f8104f0>
return[name[activity_form]] | keyword[def] identifier[get_activity_form_for_create] ( identifier[self] , identifier[objective_id] = keyword[None] , identifier[activity_record_types] = keyword[None] ):
literal[string]
keyword[if] identifier[objective_id] keyword[is] keyword[None] :
keyword[raise] identifier[NullArgument] ()
keyword[if] identifier[activity_record_types] keyword[is] keyword[None] :
keyword[pass]
identifier[activity_form] = identifier[objects] . identifier[ActivityForm] ( identifier[osid_object_map] = keyword[None] , identifier[objective_id] = identifier[objective_id] )
identifier[self] . identifier[_forms] [ identifier[activity_form] . identifier[get_id] (). identifier[get_identifier] ()]= keyword[not] identifier[CREATED]
keyword[return] identifier[activity_form] | def get_activity_form_for_create(self, objective_id=None, activity_record_types=None):
"""Gets the activity form for creating new activities.
A new form should be requested for each create transaction.
arg: activityRecordTypes (osid.type.Type): array of activity
record types
return: (osid.learning.ActivityForm) - the activity form
raise: NotFound - objectiveId is not found
raise: NullArgument - objectiveId or activityRecordTypes is
null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
compliance: mandatory - This method must be implemented.
"""
if objective_id is None:
raise NullArgument() # depends on [control=['if'], data=[]]
if activity_record_types is None:
pass # Still need to deal with the record_types argument # depends on [control=['if'], data=[]]
activity_form = objects.ActivityForm(osid_object_map=None, objective_id=objective_id)
self._forms[activity_form.get_id().get_identifier()] = not CREATED
return activity_form |
def loudness(self):
    """bool: The Sonos speaker's loudness compensation.

    True if on, False otherwise.

    Loudness is a complicated topic. You can find a nice summary about this
    feature here: http://forums.sonos.com/showthread.php?p=4698#post4698
    """
    response = self.renderingControl.GetLoudness([
        ('InstanceID', 0),
        ('Channel', 'Master'),
    ])
    # The UPnP service reports the current state as a numeric string
    # ('0' or '1'); normalise it to a real bool.
    return bool(int(response["CurrentLoudness"]))
constant[bool: The Sonos speaker's loudness compensation.
True if on, False otherwise.
Loudness is a complicated topic. You can find a nice summary about this
feature here: http://forums.sonos.com/showthread.php?p=4698#post4698
]
variable[response] assign[=] call[name[self].renderingControl.GetLoudness, parameter[list[[<ast.Tuple object at 0x7da18f7206d0>, <ast.Tuple object at 0x7da18f7206a0>]]]]
variable[loudness] assign[=] call[name[response]][constant[CurrentLoudness]]
return[<ast.IfExp object at 0x7da18dc06920>] | keyword[def] identifier[loudness] ( identifier[self] ):
literal[string]
identifier[response] = identifier[self] . identifier[renderingControl] . identifier[GetLoudness] ([
( literal[string] , literal[int] ),
( literal[string] , literal[string] ),
])
identifier[loudness] = identifier[response] [ literal[string] ]
keyword[return] keyword[True] keyword[if] identifier[int] ( identifier[loudness] ) keyword[else] keyword[False] | def loudness(self):
"""bool: The Sonos speaker's loudness compensation.
True if on, False otherwise.
Loudness is a complicated topic. You can find a nice summary about this
feature here: http://forums.sonos.com/showthread.php?p=4698#post4698
"""
response = self.renderingControl.GetLoudness([('InstanceID', 0), ('Channel', 'Master')])
loudness = response['CurrentLoudness']
return True if int(loudness) else False |
def alpha(self, x, y, kwargs, k=None):
    """
    deflection angles
    :param x: x-position (preferentially arcsec)
    :type x: numpy array
    :param y: y-position (preferentially arcsec)
    :type y: numpy array
    :param kwargs: list of keyword arguments of lens model parameters matching the lens model classes
    :param k: only evaluate the k-th lens model
    :return: deflection angles in units of arcsec
    """
    x = np.array(x, dtype=float)
    y = np.array(y, dtype=float)
    # Boolean mask selecting which of the configured lens models to evaluate.
    bool_list = self._bool_list(k)
    # Foreground correction: x_, y_ are the shifted coordinates and
    # kwargs_copy the correspondingly updated per-model parameters.
    x_, y_, kwargs_copy = self._update_foreground(x, y, kwargs)
    f_x, f_y = np.zeros_like(x_), np.zeros_like(x_)
    for i, func in enumerate(self.func_list):
        if bool_list[i] is True:
            if self._model_list[i] == 'SHEAR':
                # NOTE(review): the external shear is evaluated on the
                # original (unshifted) coordinates with the original kwargs,
                # not the foreground-corrected ones -- presumably intentional
                # for the foreground-shear convention; confirm.
                f_x_i, f_y_i = func.derivatives(x, y, **kwargs[i])
            else:
                f_x_i, f_y_i = func.derivatives(x_, y_, **kwargs_copy[i])
            # Deflections of the individual models add linearly.
            f_x += f_x_i
            f_y += f_y_i
    return f_x, f_y
constant[
deflection angles
:param x: x-position (preferentially arcsec)
:type x: numpy array
:param y: y-position (preferentially arcsec)
:type y: numpy array
:param kwargs: list of keyword arguments of lens model parameters matching the lens model classes
:param k: only evaluate the k-th lens model
:return: deflection angles in units of arcsec
]
variable[x] assign[=] call[name[np].array, parameter[name[x]]]
variable[y] assign[=] call[name[np].array, parameter[name[y]]]
variable[bool_list] assign[=] call[name[self]._bool_list, parameter[name[k]]]
<ast.Tuple object at 0x7da1b04a7b20> assign[=] call[name[self]._update_foreground, parameter[name[x], name[y], name[kwargs]]]
<ast.Tuple object at 0x7da1b04a6e90> assign[=] tuple[[<ast.Call object at 0x7da1b04a7160>, <ast.Call object at 0x7da1b04a4460>]]
for taget[tuple[[<ast.Name object at 0x7da1b04a5600>, <ast.Name object at 0x7da1b04a4f40>]]] in starred[call[name[enumerate], parameter[name[self].func_list]]] begin[:]
if compare[call[name[bool_list]][name[i]] is constant[True]] begin[:]
if compare[call[name[self]._model_list][name[i]] equal[==] constant[SHEAR]] begin[:]
<ast.Tuple object at 0x7da1b04a7370> assign[=] call[name[func].derivatives, parameter[name[x], name[y]]]
<ast.AugAssign object at 0x7da18bcc8220>
<ast.AugAssign object at 0x7da18bcc9480>
return[tuple[[<ast.Name object at 0x7da18bcc9990>, <ast.Name object at 0x7da18bcc8f10>]]] | keyword[def] identifier[alpha] ( identifier[self] , identifier[x] , identifier[y] , identifier[kwargs] , identifier[k] = keyword[None] ):
literal[string]
identifier[x] = identifier[np] . identifier[array] ( identifier[x] , identifier[dtype] = identifier[float] )
identifier[y] = identifier[np] . identifier[array] ( identifier[y] , identifier[dtype] = identifier[float] )
identifier[bool_list] = identifier[self] . identifier[_bool_list] ( identifier[k] )
identifier[x_] , identifier[y_] , identifier[kwargs_copy] = identifier[self] . identifier[_update_foreground] ( identifier[x] , identifier[y] , identifier[kwargs] )
identifier[f_x] , identifier[f_y] = identifier[np] . identifier[zeros_like] ( identifier[x_] ), identifier[np] . identifier[zeros_like] ( identifier[x_] )
keyword[for] identifier[i] , identifier[func] keyword[in] identifier[enumerate] ( identifier[self] . identifier[func_list] ):
keyword[if] identifier[bool_list] [ identifier[i] ] keyword[is] keyword[True] :
keyword[if] identifier[self] . identifier[_model_list] [ identifier[i] ]== literal[string] :
identifier[f_x_i] , identifier[f_y_i] = identifier[func] . identifier[derivatives] ( identifier[x] , identifier[y] ,** identifier[kwargs] [ identifier[i] ])
keyword[else] :
identifier[f_x_i] , identifier[f_y_i] = identifier[func] . identifier[derivatives] ( identifier[x_] , identifier[y_] ,** identifier[kwargs_copy] [ identifier[i] ])
identifier[f_x] += identifier[f_x_i]
identifier[f_y] += identifier[f_y_i]
keyword[return] identifier[f_x] , identifier[f_y] | def alpha(self, x, y, kwargs, k=None):
"""
deflection angles
:param x: x-position (preferentially arcsec)
:type x: numpy array
:param y: y-position (preferentially arcsec)
:type y: numpy array
:param kwargs: list of keyword arguments of lens model parameters matching the lens model classes
:param k: only evaluate the k-th lens model
:return: deflection angles in units of arcsec
"""
x = np.array(x, dtype=float)
y = np.array(y, dtype=float)
bool_list = self._bool_list(k)
(x_, y_, kwargs_copy) = self._update_foreground(x, y, kwargs)
(f_x, f_y) = (np.zeros_like(x_), np.zeros_like(x_))
for (i, func) in enumerate(self.func_list):
if bool_list[i] is True:
if self._model_list[i] == 'SHEAR':
(f_x_i, f_y_i) = func.derivatives(x, y, **kwargs[i]) # depends on [control=['if'], data=[]]
else:
(f_x_i, f_y_i) = func.derivatives(x_, y_, **kwargs_copy[i])
f_x += f_x_i
f_y += f_y_i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (f_x, f_y) |
def split_iterable_as_iterable(self, values):
"""Group iterable into iterables, in the order of the keys
Parameters
----------
values : iterable of length equal to keys
iterable of values to be grouped
Yields
------
iterable of items in values
Notes
-----
Memory consumption depends on the amount of sorting required
Worst case, if index.sorter[-1] = 0, we need to consume the entire value iterable,
before we can start yielding any output
But to the extent that the keys are already sorted, the grouping is lazy
"""
values = iter(enumerate(values))
cache = dict()
def get_value(ti):
try:
return cache.pop(ti)
except:
while True:
i, v = next(values)
if i==ti:
return v
cache[i] = v
s = iter(self.index.sorter)
for c in self.count:
yield (get_value(i) for i in itertools.islice(s, int(c))) | def function[split_iterable_as_iterable, parameter[self, values]]:
constant[Group iterable into iterables, in the order of the keys
Parameters
----------
values : iterable of length equal to keys
iterable of values to be grouped
Yields
------
iterable of items in values
Notes
-----
Memory consumption depends on the amount of sorting required
Worst case, if index.sorter[-1] = 0, we need to consume the entire value iterable,
before we can start yielding any output
But to the extent that the keys are already sorted, the grouping is lazy
]
variable[values] assign[=] call[name[iter], parameter[call[name[enumerate], parameter[name[values]]]]]
variable[cache] assign[=] call[name[dict], parameter[]]
def function[get_value, parameter[ti]]:
<ast.Try object at 0x7da20cabd570>
variable[s] assign[=] call[name[iter], parameter[name[self].index.sorter]]
for taget[name[c]] in starred[name[self].count] begin[:]
<ast.Yield object at 0x7da18dc99e70> | keyword[def] identifier[split_iterable_as_iterable] ( identifier[self] , identifier[values] ):
literal[string]
identifier[values] = identifier[iter] ( identifier[enumerate] ( identifier[values] ))
identifier[cache] = identifier[dict] ()
keyword[def] identifier[get_value] ( identifier[ti] ):
keyword[try] :
keyword[return] identifier[cache] . identifier[pop] ( identifier[ti] )
keyword[except] :
keyword[while] keyword[True] :
identifier[i] , identifier[v] = identifier[next] ( identifier[values] )
keyword[if] identifier[i] == identifier[ti] :
keyword[return] identifier[v]
identifier[cache] [ identifier[i] ]= identifier[v]
identifier[s] = identifier[iter] ( identifier[self] . identifier[index] . identifier[sorter] )
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[count] :
keyword[yield] ( identifier[get_value] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[itertools] . identifier[islice] ( identifier[s] , identifier[int] ( identifier[c] ))) | def split_iterable_as_iterable(self, values):
"""Group iterable into iterables, in the order of the keys
Parameters
----------
values : iterable of length equal to keys
iterable of values to be grouped
Yields
------
iterable of items in values
Notes
-----
Memory consumption depends on the amount of sorting required
Worst case, if index.sorter[-1] = 0, we need to consume the entire value iterable,
before we can start yielding any output
But to the extent that the keys are already sorted, the grouping is lazy
"""
values = iter(enumerate(values))
cache = dict()
def get_value(ti):
try:
return cache.pop(ti) # depends on [control=['try'], data=[]]
except:
while True:
(i, v) = next(values)
if i == ti:
return v # depends on [control=['if'], data=[]]
cache[i] = v # depends on [control=['while'], data=[]] # depends on [control=['except'], data=[]]
s = iter(self.index.sorter)
for c in self.count:
yield (get_value(i) for i in itertools.islice(s, int(c))) # depends on [control=['for'], data=['c']] |
def __parse_identities(self, json):
"""Parse identities using Stackalytics format.
The Stackalytics identities format is a JSON document under the
"users" key. The document should follow the next schema:
{
"users": [
{
"launchpad_id": "0-jsmith",
"gerrit_id": "jsmith",
"companies": [
{
"company_name": "Example",
"end_date": null
}
],
"user_name": "John Smith",
"emails": ["jsmith@example.com", "jsmith@example.net"]
},
{
"companies": [
{
"company_name": "Bitergia",
"end_date": null
},
{
"company_name": "Example",
"end_date": "2010-Jan-01"
}
],
"user_name": "John Doe",
"emails": ["jdoe@bitergia.com", "jdoe@example.com"]
}
]
}
:parse json: JSON object to parse
:raise InvalidFormatError: raised when the format of the JSON is
not valid.
"""
try:
for user in json['users']:
name = self.__encode(user['user_name'])
uuid = name
uid = UniqueIdentity(uuid=uuid)
identity = Identity(name=name, email=None, username=None,
source=self.source, uuid=uuid)
uid.identities.append(identity)
for email_addr in user['emails']:
email = self.__encode(email_addr)
identity = Identity(name=name, email=email, username=None,
source=self.source, uuid=uuid)
uid.identities.append(identity)
for site_id in ['gerrit_id', 'launchpad_id']:
username = user.get(site_id, None)
if not username:
continue
username = self.__encode(username)
source = self.source + ':' + site_id.replace('_id', '')
identity = Identity(name=name, email=None, username=username,
source=source, uuid=uuid)
uid.identities.append(identity)
for rol in self.__parse_enrollments(user):
uid.enrollments.append(rol)
self._identities[uuid] = uid
except KeyError as e:
msg = "invalid json format. Attribute %s not found" % e.args
raise InvalidFormatError(cause=msg) | def function[__parse_identities, parameter[self, json]]:
constant[Parse identities using Stackalytics format.
The Stackalytics identities format is a JSON document under the
"users" key. The document should follow the next schema:
{
"users": [
{
"launchpad_id": "0-jsmith",
"gerrit_id": "jsmith",
"companies": [
{
"company_name": "Example",
"end_date": null
}
],
"user_name": "John Smith",
"emails": ["jsmith@example.com", "jsmith@example.net"]
},
{
"companies": [
{
"company_name": "Bitergia",
"end_date": null
},
{
"company_name": "Example",
"end_date": "2010-Jan-01"
}
],
"user_name": "John Doe",
"emails": ["jdoe@bitergia.com", "jdoe@example.com"]
}
]
}
:parse json: JSON object to parse
:raise InvalidFormatError: raised when the format of the JSON is
not valid.
]
<ast.Try object at 0x7da1b0ebd570> | keyword[def] identifier[__parse_identities] ( identifier[self] , identifier[json] ):
literal[string]
keyword[try] :
keyword[for] identifier[user] keyword[in] identifier[json] [ literal[string] ]:
identifier[name] = identifier[self] . identifier[__encode] ( identifier[user] [ literal[string] ])
identifier[uuid] = identifier[name]
identifier[uid] = identifier[UniqueIdentity] ( identifier[uuid] = identifier[uuid] )
identifier[identity] = identifier[Identity] ( identifier[name] = identifier[name] , identifier[email] = keyword[None] , identifier[username] = keyword[None] ,
identifier[source] = identifier[self] . identifier[source] , identifier[uuid] = identifier[uuid] )
identifier[uid] . identifier[identities] . identifier[append] ( identifier[identity] )
keyword[for] identifier[email_addr] keyword[in] identifier[user] [ literal[string] ]:
identifier[email] = identifier[self] . identifier[__encode] ( identifier[email_addr] )
identifier[identity] = identifier[Identity] ( identifier[name] = identifier[name] , identifier[email] = identifier[email] , identifier[username] = keyword[None] ,
identifier[source] = identifier[self] . identifier[source] , identifier[uuid] = identifier[uuid] )
identifier[uid] . identifier[identities] . identifier[append] ( identifier[identity] )
keyword[for] identifier[site_id] keyword[in] [ literal[string] , literal[string] ]:
identifier[username] = identifier[user] . identifier[get] ( identifier[site_id] , keyword[None] )
keyword[if] keyword[not] identifier[username] :
keyword[continue]
identifier[username] = identifier[self] . identifier[__encode] ( identifier[username] )
identifier[source] = identifier[self] . identifier[source] + literal[string] + identifier[site_id] . identifier[replace] ( literal[string] , literal[string] )
identifier[identity] = identifier[Identity] ( identifier[name] = identifier[name] , identifier[email] = keyword[None] , identifier[username] = identifier[username] ,
identifier[source] = identifier[source] , identifier[uuid] = identifier[uuid] )
identifier[uid] . identifier[identities] . identifier[append] ( identifier[identity] )
keyword[for] identifier[rol] keyword[in] identifier[self] . identifier[__parse_enrollments] ( identifier[user] ):
identifier[uid] . identifier[enrollments] . identifier[append] ( identifier[rol] )
identifier[self] . identifier[_identities] [ identifier[uuid] ]= identifier[uid]
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
identifier[msg] = literal[string] % identifier[e] . identifier[args]
keyword[raise] identifier[InvalidFormatError] ( identifier[cause] = identifier[msg] ) | def __parse_identities(self, json):
"""Parse identities using Stackalytics format.
The Stackalytics identities format is a JSON document under the
"users" key. The document should follow the next schema:
{
"users": [
{
"launchpad_id": "0-jsmith",
"gerrit_id": "jsmith",
"companies": [
{
"company_name": "Example",
"end_date": null
}
],
"user_name": "John Smith",
"emails": ["jsmith@example.com", "jsmith@example.net"]
},
{
"companies": [
{
"company_name": "Bitergia",
"end_date": null
},
{
"company_name": "Example",
"end_date": "2010-Jan-01"
}
],
"user_name": "John Doe",
"emails": ["jdoe@bitergia.com", "jdoe@example.com"]
}
]
}
:parse json: JSON object to parse
:raise InvalidFormatError: raised when the format of the JSON is
not valid.
"""
try:
for user in json['users']:
name = self.__encode(user['user_name'])
uuid = name
uid = UniqueIdentity(uuid=uuid)
identity = Identity(name=name, email=None, username=None, source=self.source, uuid=uuid)
uid.identities.append(identity)
for email_addr in user['emails']:
email = self.__encode(email_addr)
identity = Identity(name=name, email=email, username=None, source=self.source, uuid=uuid)
uid.identities.append(identity) # depends on [control=['for'], data=['email_addr']]
for site_id in ['gerrit_id', 'launchpad_id']:
username = user.get(site_id, None)
if not username:
continue # depends on [control=['if'], data=[]]
username = self.__encode(username)
source = self.source + ':' + site_id.replace('_id', '')
identity = Identity(name=name, email=None, username=username, source=source, uuid=uuid)
uid.identities.append(identity) # depends on [control=['for'], data=['site_id']]
for rol in self.__parse_enrollments(user):
uid.enrollments.append(rol) # depends on [control=['for'], data=['rol']]
self._identities[uuid] = uid # depends on [control=['for'], data=['user']] # depends on [control=['try'], data=[]]
except KeyError as e:
msg = 'invalid json format. Attribute %s not found' % e.args
raise InvalidFormatError(cause=msg) # depends on [control=['except'], data=['e']] |
def trim_by_coverage(self, min_reads):
"""
Given the min number of reads overlapping each nucleotide of
a variant sequence, trim this sequence by getting rid of positions
which are overlapped by fewer reads than specified.
"""
read_count_array = self.coverage()
logger.info("Coverage: %s (len=%d)" % (
read_count_array, len(read_count_array)))
sufficient_coverage_mask = read_count_array >= min_reads
sufficient_coverage_indices = np.argwhere(sufficient_coverage_mask)
if len(sufficient_coverage_indices) == 0:
logger.debug("No bases in %s have coverage >= %d" % (self, min_reads))
return VariantSequence(prefix="", alt="", suffix="", reads=self.reads)
variant_start_index, variant_end_index = self.variant_indices()
# assuming that coverage drops off monotonically away from
# variant nucleotides
first_covered_index = sufficient_coverage_indices.min()
last_covered_index = sufficient_coverage_indices.max()
# adding 1 to last_covered_index since it's an inclusive index
# whereas variant_end_index is the end of a half-open interval
if (first_covered_index > variant_start_index or
last_covered_index + 1 < variant_end_index):
# Example:
# Nucleotide sequence:
# ACCCTTTT|AA|GGCGCGCC
# Coverage:
# 12222333|44|33333211
# Then the mask for bases covered >= 4x would be:
# ________|**|________
# with indices:
# first_covered_index = 9
# last_covered_index = 10
# variant_start_index = 9
# variant_end_index = 11
logger.debug("Some variant bases in %s don't have coverage >= %d" % (
self, min_reads))
return VariantSequence(prefix="", alt="", suffix="", reads=self.reads)
return VariantSequence(
prefix=self.prefix[first_covered_index:],
alt=self.alt,
suffix=self.suffix[:last_covered_index - variant_end_index + 1],
reads=self.reads) | def function[trim_by_coverage, parameter[self, min_reads]]:
constant[
Given the min number of reads overlapping each nucleotide of
a variant sequence, trim this sequence by getting rid of positions
which are overlapped by fewer reads than specified.
]
variable[read_count_array] assign[=] call[name[self].coverage, parameter[]]
call[name[logger].info, parameter[binary_operation[constant[Coverage: %s (len=%d)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6abf70>, <ast.Call object at 0x7da1b257d4e0>]]]]]
variable[sufficient_coverage_mask] assign[=] compare[name[read_count_array] greater_or_equal[>=] name[min_reads]]
variable[sufficient_coverage_indices] assign[=] call[name[np].argwhere, parameter[name[sufficient_coverage_mask]]]
if compare[call[name[len], parameter[name[sufficient_coverage_indices]]] equal[==] constant[0]] begin[:]
call[name[logger].debug, parameter[binary_operation[constant[No bases in %s have coverage >= %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b257ed70>, <ast.Name object at 0x7da1b257e9b0>]]]]]
return[call[name[VariantSequence], parameter[]]]
<ast.Tuple object at 0x7da1b257d840> assign[=] call[name[self].variant_indices, parameter[]]
variable[first_covered_index] assign[=] call[name[sufficient_coverage_indices].min, parameter[]]
variable[last_covered_index] assign[=] call[name[sufficient_coverage_indices].max, parameter[]]
if <ast.BoolOp object at 0x7da1b257eb60> begin[:]
call[name[logger].debug, parameter[binary_operation[constant[Some variant bases in %s don't have coverage >= %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b257d2d0>, <ast.Name object at 0x7da1b257ec80>]]]]]
return[call[name[VariantSequence], parameter[]]]
return[call[name[VariantSequence], parameter[]]] | keyword[def] identifier[trim_by_coverage] ( identifier[self] , identifier[min_reads] ):
literal[string]
identifier[read_count_array] = identifier[self] . identifier[coverage] ()
identifier[logger] . identifier[info] ( literal[string] %(
identifier[read_count_array] , identifier[len] ( identifier[read_count_array] )))
identifier[sufficient_coverage_mask] = identifier[read_count_array] >= identifier[min_reads]
identifier[sufficient_coverage_indices] = identifier[np] . identifier[argwhere] ( identifier[sufficient_coverage_mask] )
keyword[if] identifier[len] ( identifier[sufficient_coverage_indices] )== literal[int] :
identifier[logger] . identifier[debug] ( literal[string] %( identifier[self] , identifier[min_reads] ))
keyword[return] identifier[VariantSequence] ( identifier[prefix] = literal[string] , identifier[alt] = literal[string] , identifier[suffix] = literal[string] , identifier[reads] = identifier[self] . identifier[reads] )
identifier[variant_start_index] , identifier[variant_end_index] = identifier[self] . identifier[variant_indices] ()
identifier[first_covered_index] = identifier[sufficient_coverage_indices] . identifier[min] ()
identifier[last_covered_index] = identifier[sufficient_coverage_indices] . identifier[max] ()
keyword[if] ( identifier[first_covered_index] > identifier[variant_start_index] keyword[or]
identifier[last_covered_index] + literal[int] < identifier[variant_end_index] ):
identifier[logger] . identifier[debug] ( literal[string] %(
identifier[self] , identifier[min_reads] ))
keyword[return] identifier[VariantSequence] ( identifier[prefix] = literal[string] , identifier[alt] = literal[string] , identifier[suffix] = literal[string] , identifier[reads] = identifier[self] . identifier[reads] )
keyword[return] identifier[VariantSequence] (
identifier[prefix] = identifier[self] . identifier[prefix] [ identifier[first_covered_index] :],
identifier[alt] = identifier[self] . identifier[alt] ,
identifier[suffix] = identifier[self] . identifier[suffix] [: identifier[last_covered_index] - identifier[variant_end_index] + literal[int] ],
identifier[reads] = identifier[self] . identifier[reads] ) | def trim_by_coverage(self, min_reads):
"""
Given the min number of reads overlapping each nucleotide of
a variant sequence, trim this sequence by getting rid of positions
which are overlapped by fewer reads than specified.
"""
read_count_array = self.coverage()
logger.info('Coverage: %s (len=%d)' % (read_count_array, len(read_count_array)))
sufficient_coverage_mask = read_count_array >= min_reads
sufficient_coverage_indices = np.argwhere(sufficient_coverage_mask)
if len(sufficient_coverage_indices) == 0:
logger.debug('No bases in %s have coverage >= %d' % (self, min_reads))
return VariantSequence(prefix='', alt='', suffix='', reads=self.reads) # depends on [control=['if'], data=[]]
(variant_start_index, variant_end_index) = self.variant_indices()
# assuming that coverage drops off monotonically away from
# variant nucleotides
first_covered_index = sufficient_coverage_indices.min()
last_covered_index = sufficient_coverage_indices.max()
# adding 1 to last_covered_index since it's an inclusive index
# whereas variant_end_index is the end of a half-open interval
if first_covered_index > variant_start_index or last_covered_index + 1 < variant_end_index:
# Example:
# Nucleotide sequence:
# ACCCTTTT|AA|GGCGCGCC
# Coverage:
# 12222333|44|33333211
# Then the mask for bases covered >= 4x would be:
# ________|**|________
# with indices:
# first_covered_index = 9
# last_covered_index = 10
# variant_start_index = 9
# variant_end_index = 11
logger.debug("Some variant bases in %s don't have coverage >= %d" % (self, min_reads))
return VariantSequence(prefix='', alt='', suffix='', reads=self.reads) # depends on [control=['if'], data=[]]
return VariantSequence(prefix=self.prefix[first_covered_index:], alt=self.alt, suffix=self.suffix[:last_covered_index - variant_end_index + 1], reads=self.reads) |
def _open_file(self, mode, encoding=None):
"""
Opens the next current file.
:param str mode: The mode for opening the file.
:param str encoding: The encoding of the file.
"""
if self._filename[-4:] == '.bz2':
self._file = bz2.open(self._filename, mode=mode, encoding=encoding)
else:
self._file = open(self._filename, mode=mode, encoding=encoding) | def function[_open_file, parameter[self, mode, encoding]]:
constant[
Opens the next current file.
:param str mode: The mode for opening the file.
:param str encoding: The encoding of the file.
]
if compare[call[name[self]._filename][<ast.Slice object at 0x7da18dc9b670>] equal[==] constant[.bz2]] begin[:]
name[self]._file assign[=] call[name[bz2].open, parameter[name[self]._filename]] | keyword[def] identifier[_open_file] ( identifier[self] , identifier[mode] , identifier[encoding] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_filename] [- literal[int] :]== literal[string] :
identifier[self] . identifier[_file] = identifier[bz2] . identifier[open] ( identifier[self] . identifier[_filename] , identifier[mode] = identifier[mode] , identifier[encoding] = identifier[encoding] )
keyword[else] :
identifier[self] . identifier[_file] = identifier[open] ( identifier[self] . identifier[_filename] , identifier[mode] = identifier[mode] , identifier[encoding] = identifier[encoding] ) | def _open_file(self, mode, encoding=None):
"""
Opens the next current file.
:param str mode: The mode for opening the file.
:param str encoding: The encoding of the file.
"""
if self._filename[-4:] == '.bz2':
self._file = bz2.open(self._filename, mode=mode, encoding=encoding) # depends on [control=['if'], data=[]]
else:
self._file = open(self._filename, mode=mode, encoding=encoding) |
def RemoveSession(self, session_path):
'''OBEX method to remove an existing transfer session.
This takes the path of the transfer Session object and removes it.
'''
manager = mockobject.objects['/']
# Remove all the session's transfers.
transfer_id = 0
while session_path + '/transfer' + str(transfer_id) in mockobject.objects:
transfer_path = session_path + '/transfer' + str(transfer_id)
transfer_id += 1
self.RemoveObject(transfer_path)
manager.EmitSignal(OBJECT_MANAGER_IFACE, 'InterfacesRemoved',
'oas', [
dbus.ObjectPath(transfer_path),
[TRANSFER_IFACE],
])
# Remove the session itself.
self.RemoveObject(session_path)
manager.EmitSignal(OBJECT_MANAGER_IFACE, 'InterfacesRemoved',
'oas', [
dbus.ObjectPath(session_path),
[SESSION_IFACE, PHONEBOOK_ACCESS_IFACE],
]) | def function[RemoveSession, parameter[self, session_path]]:
constant[OBEX method to remove an existing transfer session.
This takes the path of the transfer Session object and removes it.
]
variable[manager] assign[=] call[name[mockobject].objects][constant[/]]
variable[transfer_id] assign[=] constant[0]
while compare[binary_operation[binary_operation[name[session_path] + constant[/transfer]] + call[name[str], parameter[name[transfer_id]]]] in name[mockobject].objects] begin[:]
variable[transfer_path] assign[=] binary_operation[binary_operation[name[session_path] + constant[/transfer]] + call[name[str], parameter[name[transfer_id]]]]
<ast.AugAssign object at 0x7da204344fa0>
call[name[self].RemoveObject, parameter[name[transfer_path]]]
call[name[manager].EmitSignal, parameter[name[OBJECT_MANAGER_IFACE], constant[InterfacesRemoved], constant[oas], list[[<ast.Call object at 0x7da204346da0>, <ast.List object at 0x7da2043458a0>]]]]
call[name[self].RemoveObject, parameter[name[session_path]]]
call[name[manager].EmitSignal, parameter[name[OBJECT_MANAGER_IFACE], constant[InterfacesRemoved], constant[oas], list[[<ast.Call object at 0x7da20e963370>, <ast.List object at 0x7da1b0efeaa0>]]]] | keyword[def] identifier[RemoveSession] ( identifier[self] , identifier[session_path] ):
literal[string]
identifier[manager] = identifier[mockobject] . identifier[objects] [ literal[string] ]
identifier[transfer_id] = literal[int]
keyword[while] identifier[session_path] + literal[string] + identifier[str] ( identifier[transfer_id] ) keyword[in] identifier[mockobject] . identifier[objects] :
identifier[transfer_path] = identifier[session_path] + literal[string] + identifier[str] ( identifier[transfer_id] )
identifier[transfer_id] += literal[int]
identifier[self] . identifier[RemoveObject] ( identifier[transfer_path] )
identifier[manager] . identifier[EmitSignal] ( identifier[OBJECT_MANAGER_IFACE] , literal[string] ,
literal[string] ,[
identifier[dbus] . identifier[ObjectPath] ( identifier[transfer_path] ),
[ identifier[TRANSFER_IFACE] ],
])
identifier[self] . identifier[RemoveObject] ( identifier[session_path] )
identifier[manager] . identifier[EmitSignal] ( identifier[OBJECT_MANAGER_IFACE] , literal[string] ,
literal[string] ,[
identifier[dbus] . identifier[ObjectPath] ( identifier[session_path] ),
[ identifier[SESSION_IFACE] , identifier[PHONEBOOK_ACCESS_IFACE] ],
]) | def RemoveSession(self, session_path):
"""OBEX method to remove an existing transfer session.
This takes the path of the transfer Session object and removes it.
"""
manager = mockobject.objects['/']
# Remove all the session's transfers.
transfer_id = 0
while session_path + '/transfer' + str(transfer_id) in mockobject.objects:
transfer_path = session_path + '/transfer' + str(transfer_id)
transfer_id += 1
self.RemoveObject(transfer_path)
manager.EmitSignal(OBJECT_MANAGER_IFACE, 'InterfacesRemoved', 'oas', [dbus.ObjectPath(transfer_path), [TRANSFER_IFACE]]) # depends on [control=['while'], data=[]]
# Remove the session itself.
self.RemoveObject(session_path)
manager.EmitSignal(OBJECT_MANAGER_IFACE, 'InterfacesRemoved', 'oas', [dbus.ObjectPath(session_path), [SESSION_IFACE, PHONEBOOK_ACCESS_IFACE]]) |
def _add_block_number_to_hash_lookup(db: BaseDB, header: BlockHeader) -> None:
"""
Sets a record in the database to allow looking up this header by its
block number.
"""
block_number_to_hash_key = SchemaV1.make_block_number_to_hash_lookup_key(
header.block_number
)
db.set(
block_number_to_hash_key,
rlp.encode(header.hash, sedes=rlp.sedes.binary),
) | def function[_add_block_number_to_hash_lookup, parameter[db, header]]:
constant[
Sets a record in the database to allow looking up this header by its
block number.
]
variable[block_number_to_hash_key] assign[=] call[name[SchemaV1].make_block_number_to_hash_lookup_key, parameter[name[header].block_number]]
call[name[db].set, parameter[name[block_number_to_hash_key], call[name[rlp].encode, parameter[name[header].hash]]]] | keyword[def] identifier[_add_block_number_to_hash_lookup] ( identifier[db] : identifier[BaseDB] , identifier[header] : identifier[BlockHeader] )-> keyword[None] :
literal[string]
identifier[block_number_to_hash_key] = identifier[SchemaV1] . identifier[make_block_number_to_hash_lookup_key] (
identifier[header] . identifier[block_number]
)
identifier[db] . identifier[set] (
identifier[block_number_to_hash_key] ,
identifier[rlp] . identifier[encode] ( identifier[header] . identifier[hash] , identifier[sedes] = identifier[rlp] . identifier[sedes] . identifier[binary] ),
) | def _add_block_number_to_hash_lookup(db: BaseDB, header: BlockHeader) -> None:
"""
Sets a record in the database to allow looking up this header by its
block number.
"""
block_number_to_hash_key = SchemaV1.make_block_number_to_hash_lookup_key(header.block_number)
db.set(block_number_to_hash_key, rlp.encode(header.hash, sedes=rlp.sedes.binary)) |
def _find_jar(self, path0=None):
"""
Return the location of an h2o.jar executable.
:param path0: Explicitly given h2o.jar path. If provided, then we will simply check whether the file is there,
otherwise we will search for an executable in locations returned by ._jar_paths().
:raises H2OStartupError: if no h2o.jar executable can be found.
"""
jar_paths = [path0] if path0 else self._jar_paths()
searched_paths = []
for jp in jar_paths:
searched_paths.append(jp)
if os.path.exists(jp):
return jp
raise H2OStartupError("Cannot start local server: h2o.jar not found. Paths searched:\n" +
"".join(" %s\n" % s for s in searched_paths)) | def function[_find_jar, parameter[self, path0]]:
constant[
Return the location of an h2o.jar executable.
:param path0: Explicitly given h2o.jar path. If provided, then we will simply check whether the file is there,
otherwise we will search for an executable in locations returned by ._jar_paths().
:raises H2OStartupError: if no h2o.jar executable can be found.
]
variable[jar_paths] assign[=] <ast.IfExp object at 0x7da20cabc0a0>
variable[searched_paths] assign[=] list[[]]
for taget[name[jp]] in starred[name[jar_paths]] begin[:]
call[name[searched_paths].append, parameter[name[jp]]]
if call[name[os].path.exists, parameter[name[jp]]] begin[:]
return[name[jp]]
<ast.Raise object at 0x7da20cabfe50> | keyword[def] identifier[_find_jar] ( identifier[self] , identifier[path0] = keyword[None] ):
literal[string]
identifier[jar_paths] =[ identifier[path0] ] keyword[if] identifier[path0] keyword[else] identifier[self] . identifier[_jar_paths] ()
identifier[searched_paths] =[]
keyword[for] identifier[jp] keyword[in] identifier[jar_paths] :
identifier[searched_paths] . identifier[append] ( identifier[jp] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[jp] ):
keyword[return] identifier[jp]
keyword[raise] identifier[H2OStartupError] ( literal[string] +
literal[string] . identifier[join] ( literal[string] % identifier[s] keyword[for] identifier[s] keyword[in] identifier[searched_paths] )) | def _find_jar(self, path0=None):
"""
Return the location of an h2o.jar executable.
:param path0: Explicitly given h2o.jar path. If provided, then we will simply check whether the file is there,
otherwise we will search for an executable in locations returned by ._jar_paths().
:raises H2OStartupError: if no h2o.jar executable can be found.
"""
jar_paths = [path0] if path0 else self._jar_paths()
searched_paths = []
for jp in jar_paths:
searched_paths.append(jp)
if os.path.exists(jp):
return jp # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['jp']]
raise H2OStartupError('Cannot start local server: h2o.jar not found. Paths searched:\n' + ''.join((' %s\n' % s for s in searched_paths))) |
def merge(self, values, lists_only=False):
"""
Merges list-based attributes into one list including unique elements from both lists. When ``lists_only`` is
set to ``False``, updates dictionaries and overwrites single-value attributes. The resulting configuration
is 'clean', i.e. input values converted and validated. If the conversion is not possible, a ``ValueError`` is
raised.
:param values: Values to update the ConfigurationObject with.
:type values: dict | ConfigurationObject
:param lists_only: Ignore single-value attributes and update dictionary options.
:type lists_only: bool
"""
if isinstance(values, self.__class__):
self.merge_from_obj(values, lists_only=lists_only)
elif isinstance(values, dict):
self.merge_from_dict(values, lists_only=lists_only)
else:
raise ValueError("{0} or dictionary expected; found '{1}'.".format(self.__class__.__name__,
type(values).__name__)) | def function[merge, parameter[self, values, lists_only]]:
constant[
Merges list-based attributes into one list including unique elements from both lists. When ``lists_only`` is
set to ``False``, updates dictionaries and overwrites single-value attributes. The resulting configuration
is 'clean', i.e. input values converted and validated. If the conversion is not possible, a ``ValueError`` is
raised.
:param values: Values to update the ConfigurationObject with.
:type values: dict | ConfigurationObject
:param lists_only: Ignore single-value attributes and update dictionary options.
:type lists_only: bool
]
if call[name[isinstance], parameter[name[values], name[self].__class__]] begin[:]
call[name[self].merge_from_obj, parameter[name[values]]] | keyword[def] identifier[merge] ( identifier[self] , identifier[values] , identifier[lists_only] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[values] , identifier[self] . identifier[__class__] ):
identifier[self] . identifier[merge_from_obj] ( identifier[values] , identifier[lists_only] = identifier[lists_only] )
keyword[elif] identifier[isinstance] ( identifier[values] , identifier[dict] ):
identifier[self] . identifier[merge_from_dict] ( identifier[values] , identifier[lists_only] = identifier[lists_only] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] ,
identifier[type] ( identifier[values] ). identifier[__name__] )) | def merge(self, values, lists_only=False):
"""
Merges list-based attributes into one list including unique elements from both lists. When ``lists_only`` is
set to ``False``, updates dictionaries and overwrites single-value attributes. The resulting configuration
is 'clean', i.e. input values converted and validated. If the conversion is not possible, a ``ValueError`` is
raised.
:param values: Values to update the ConfigurationObject with.
:type values: dict | ConfigurationObject
:param lists_only: Ignore single-value attributes and update dictionary options.
:type lists_only: bool
"""
if isinstance(values, self.__class__):
self.merge_from_obj(values, lists_only=lists_only) # depends on [control=['if'], data=[]]
elif isinstance(values, dict):
self.merge_from_dict(values, lists_only=lists_only) # depends on [control=['if'], data=[]]
else:
raise ValueError("{0} or dictionary expected; found '{1}'.".format(self.__class__.__name__, type(values).__name__)) |
def connect(self, callback, ref=False, position='first',
before=None, after=None):
""" Connect the callback to the event group. The callback will receive
events from *all* of the emitters in the group.
See :func:`EventEmitter.connect() <vispy.event.EventEmitter.connect>`
for arguments.
"""
self._connect_emitters(True)
return EventEmitter.connect(self, callback, ref, position,
before, after) | def function[connect, parameter[self, callback, ref, position, before, after]]:
constant[ Connect the callback to the event group. The callback will receive
events from *all* of the emitters in the group.
See :func:`EventEmitter.connect() <vispy.event.EventEmitter.connect>`
for arguments.
]
call[name[self]._connect_emitters, parameter[constant[True]]]
return[call[name[EventEmitter].connect, parameter[name[self], name[callback], name[ref], name[position], name[before], name[after]]]] | keyword[def] identifier[connect] ( identifier[self] , identifier[callback] , identifier[ref] = keyword[False] , identifier[position] = literal[string] ,
identifier[before] = keyword[None] , identifier[after] = keyword[None] ):
literal[string]
identifier[self] . identifier[_connect_emitters] ( keyword[True] )
keyword[return] identifier[EventEmitter] . identifier[connect] ( identifier[self] , identifier[callback] , identifier[ref] , identifier[position] ,
identifier[before] , identifier[after] ) | def connect(self, callback, ref=False, position='first', before=None, after=None):
""" Connect the callback to the event group. The callback will receive
events from *all* of the emitters in the group.
See :func:`EventEmitter.connect() <vispy.event.EventEmitter.connect>`
for arguments.
"""
self._connect_emitters(True)
return EventEmitter.connect(self, callback, ref, position, before, after) |
def save(self, obj, data, is_m2m=False):
"""
If this field is not declared readonly, the object's attribute will
be set to the value returned by :meth:`~import_export.fields.Field.clean`.
"""
if not self.readonly:
attrs = self.attribute.split('__')
for attr in attrs[:-1]:
obj = getattr(obj, attr, None)
cleaned = self.clean(data)
if cleaned is not None or self.saves_null_values:
if not is_m2m:
setattr(obj, attrs[-1], cleaned)
else:
getattr(obj, attrs[-1]).set(cleaned) | def function[save, parameter[self, obj, data, is_m2m]]:
constant[
If this field is not declared readonly, the object's attribute will
be set to the value returned by :meth:`~import_export.fields.Field.clean`.
]
if <ast.UnaryOp object at 0x7da1b1dc71c0> begin[:]
variable[attrs] assign[=] call[name[self].attribute.split, parameter[constant[__]]]
for taget[name[attr]] in starred[call[name[attrs]][<ast.Slice object at 0x7da1b1dc5a20>]] begin[:]
variable[obj] assign[=] call[name[getattr], parameter[name[obj], name[attr], constant[None]]]
variable[cleaned] assign[=] call[name[self].clean, parameter[name[data]]]
if <ast.BoolOp object at 0x7da1b1d05240> begin[:]
if <ast.UnaryOp object at 0x7da1b1d05de0> begin[:]
call[name[setattr], parameter[name[obj], call[name[attrs]][<ast.UnaryOp object at 0x7da1b1d04a60>], name[cleaned]]] | keyword[def] identifier[save] ( identifier[self] , identifier[obj] , identifier[data] , identifier[is_m2m] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[readonly] :
identifier[attrs] = identifier[self] . identifier[attribute] . identifier[split] ( literal[string] )
keyword[for] identifier[attr] keyword[in] identifier[attrs] [:- literal[int] ]:
identifier[obj] = identifier[getattr] ( identifier[obj] , identifier[attr] , keyword[None] )
identifier[cleaned] = identifier[self] . identifier[clean] ( identifier[data] )
keyword[if] identifier[cleaned] keyword[is] keyword[not] keyword[None] keyword[or] identifier[self] . identifier[saves_null_values] :
keyword[if] keyword[not] identifier[is_m2m] :
identifier[setattr] ( identifier[obj] , identifier[attrs] [- literal[int] ], identifier[cleaned] )
keyword[else] :
identifier[getattr] ( identifier[obj] , identifier[attrs] [- literal[int] ]). identifier[set] ( identifier[cleaned] ) | def save(self, obj, data, is_m2m=False):
"""
If this field is not declared readonly, the object's attribute will
be set to the value returned by :meth:`~import_export.fields.Field.clean`.
"""
if not self.readonly:
attrs = self.attribute.split('__')
for attr in attrs[:-1]:
obj = getattr(obj, attr, None) # depends on [control=['for'], data=['attr']]
cleaned = self.clean(data)
if cleaned is not None or self.saves_null_values:
if not is_m2m:
setattr(obj, attrs[-1], cleaned) # depends on [control=['if'], data=[]]
else:
getattr(obj, attrs[-1]).set(cleaned) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def expected_magx(RAW_IMU, ATTITUDE, inclination, declination):
'''estimate from mag'''
v = expected_mag(RAW_IMU, ATTITUDE, inclination, declination)
return v.x | def function[expected_magx, parameter[RAW_IMU, ATTITUDE, inclination, declination]]:
constant[estimate from mag]
variable[v] assign[=] call[name[expected_mag], parameter[name[RAW_IMU], name[ATTITUDE], name[inclination], name[declination]]]
return[name[v].x] | keyword[def] identifier[expected_magx] ( identifier[RAW_IMU] , identifier[ATTITUDE] , identifier[inclination] , identifier[declination] ):
literal[string]
identifier[v] = identifier[expected_mag] ( identifier[RAW_IMU] , identifier[ATTITUDE] , identifier[inclination] , identifier[declination] )
keyword[return] identifier[v] . identifier[x] | def expected_magx(RAW_IMU, ATTITUDE, inclination, declination):
"""estimate from mag"""
v = expected_mag(RAW_IMU, ATTITUDE, inclination, declination)
return v.x |
def wrap_aside(self, block, aside, view, frag, context): # pylint: disable=unused-argument
"""
Creates a div which identifies the aside, points to the original block,
and writes out the json_init_args into a script tag.
The default implementation creates a frag to wraps frag w/ a div identifying the xblock. If you have
javascript, you'll need to override this impl
"""
return self._wrap_ele(
aside, view, frag, {
'block_id': block.scope_ids.usage_id,
'url_selector': 'asideBaseUrl',
}) | def function[wrap_aside, parameter[self, block, aside, view, frag, context]]:
constant[
Creates a div which identifies the aside, points to the original block,
and writes out the json_init_args into a script tag.
The default implementation creates a frag to wraps frag w/ a div identifying the xblock. If you have
javascript, you'll need to override this impl
]
return[call[name[self]._wrap_ele, parameter[name[aside], name[view], name[frag], dictionary[[<ast.Constant object at 0x7da18dc05d80>, <ast.Constant object at 0x7da18fe92410>], [<ast.Attribute object at 0x7da18fe92950>, <ast.Constant object at 0x7da18fe92350>]]]]] | keyword[def] identifier[wrap_aside] ( identifier[self] , identifier[block] , identifier[aside] , identifier[view] , identifier[frag] , identifier[context] ):
literal[string]
keyword[return] identifier[self] . identifier[_wrap_ele] (
identifier[aside] , identifier[view] , identifier[frag] ,{
literal[string] : identifier[block] . identifier[scope_ids] . identifier[usage_id] ,
literal[string] : literal[string] ,
}) | def wrap_aside(self, block, aside, view, frag, context): # pylint: disable=unused-argument
"\n Creates a div which identifies the aside, points to the original block,\n and writes out the json_init_args into a script tag.\n\n The default implementation creates a frag to wraps frag w/ a div identifying the xblock. If you have\n javascript, you'll need to override this impl\n "
return self._wrap_ele(aside, view, frag, {'block_id': block.scope_ids.usage_id, 'url_selector': 'asideBaseUrl'}) |
def copy_magic_into_pyc(input_pyc, output_pyc,
src_version, dest_version):
"""Bytecodes are the same except the magic number, so just change
that"""
(version, timestamp, magic_int,
co, is_pypy, source_size) = load_module(input_pyc)
assert version == float(src_version), (
"Need Python %s bytecode; got bytecode for version %s" %
(src_version, version))
magic_int = magic2int(magics[dest_version])
write_bytecode_file(output_pyc, co, magic_int)
print("Wrote %s" % output_pyc)
return | def function[copy_magic_into_pyc, parameter[input_pyc, output_pyc, src_version, dest_version]]:
constant[Bytecodes are the same except the magic number, so just change
that]
<ast.Tuple object at 0x7da1b2679d50> assign[=] call[name[load_module], parameter[name[input_pyc]]]
assert[compare[name[version] equal[==] call[name[float], parameter[name[src_version]]]]]
variable[magic_int] assign[=] call[name[magic2int], parameter[call[name[magics]][name[dest_version]]]]
call[name[write_bytecode_file], parameter[name[output_pyc], name[co], name[magic_int]]]
call[name[print], parameter[binary_operation[constant[Wrote %s] <ast.Mod object at 0x7da2590d6920> name[output_pyc]]]]
return[None] | keyword[def] identifier[copy_magic_into_pyc] ( identifier[input_pyc] , identifier[output_pyc] ,
identifier[src_version] , identifier[dest_version] ):
literal[string]
( identifier[version] , identifier[timestamp] , identifier[magic_int] ,
identifier[co] , identifier[is_pypy] , identifier[source_size] )= identifier[load_module] ( identifier[input_pyc] )
keyword[assert] identifier[version] == identifier[float] ( identifier[src_version] ),(
literal[string] %
( identifier[src_version] , identifier[version] ))
identifier[magic_int] = identifier[magic2int] ( identifier[magics] [ identifier[dest_version] ])
identifier[write_bytecode_file] ( identifier[output_pyc] , identifier[co] , identifier[magic_int] )
identifier[print] ( literal[string] % identifier[output_pyc] )
keyword[return] | def copy_magic_into_pyc(input_pyc, output_pyc, src_version, dest_version):
"""Bytecodes are the same except the magic number, so just change
that"""
(version, timestamp, magic_int, co, is_pypy, source_size) = load_module(input_pyc)
assert version == float(src_version), 'Need Python %s bytecode; got bytecode for version %s' % (src_version, version)
magic_int = magic2int(magics[dest_version])
write_bytecode_file(output_pyc, co, magic_int)
print('Wrote %s' % output_pyc)
return |
def retrieve_from_cache(self):
"""Try to retrieve the node's content from a cache
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
built().
Returns true if the node was successfully retrieved.
"""
if self.nocache:
return None
if not self.is_derived():
return None
return self.get_build_env().get_CacheDir().retrieve(self) | def function[retrieve_from_cache, parameter[self]]:
constant[Try to retrieve the node's content from a cache
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
built().
Returns true if the node was successfully retrieved.
]
if name[self].nocache begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da2047e96c0> begin[:]
return[constant[None]]
return[call[call[call[name[self].get_build_env, parameter[]].get_CacheDir, parameter[]].retrieve, parameter[name[self]]]] | keyword[def] identifier[retrieve_from_cache] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[nocache] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[self] . identifier[is_derived] ():
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[get_build_env] (). identifier[get_CacheDir] (). identifier[retrieve] ( identifier[self] ) | def retrieve_from_cache(self):
"""Try to retrieve the node's content from a cache
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
built().
Returns true if the node was successfully retrieved.
"""
if self.nocache:
return None # depends on [control=['if'], data=[]]
if not self.is_derived():
return None # depends on [control=['if'], data=[]]
return self.get_build_env().get_CacheDir().retrieve(self) |
def _set_line_eol(src, line):
'''
Add line ending
'''
line_ending = _get_eol(src) or os.linesep
return line.rstrip() + line_ending | def function[_set_line_eol, parameter[src, line]]:
constant[
Add line ending
]
variable[line_ending] assign[=] <ast.BoolOp object at 0x7da1b26aeb90>
return[binary_operation[call[name[line].rstrip, parameter[]] + name[line_ending]]] | keyword[def] identifier[_set_line_eol] ( identifier[src] , identifier[line] ):
literal[string]
identifier[line_ending] = identifier[_get_eol] ( identifier[src] ) keyword[or] identifier[os] . identifier[linesep]
keyword[return] identifier[line] . identifier[rstrip] ()+ identifier[line_ending] | def _set_line_eol(src, line):
"""
Add line ending
"""
line_ending = _get_eol(src) or os.linesep
return line.rstrip() + line_ending |
def explode(self):
"""Explode realms with each realm_members and higher_realms to get all the
realms sub realms.
:return: None
"""
# Manage higher realms where defined
for realm in [tmp_realm for tmp_realm in self if tmp_realm.higher_realms]:
for parent in realm.higher_realms:
higher_realm = self.find_by_name(parent)
if higher_realm:
# Add the realm to its parent realm members
higher_realm.realm_members.append(realm.get_name())
for realm in self:
# Set a recursion tag to protect against loop
for tmp_realm in self:
tmp_realm.rec_tag = False
realm.get_realms_by_explosion(self)
# Clean the recursion tag
for tmp_realm in self:
del tmp_realm.rec_tag | def function[explode, parameter[self]]:
constant[Explode realms with each realm_members and higher_realms to get all the
realms sub realms.
:return: None
]
for taget[name[realm]] in starred[<ast.ListComp object at 0x7da18f720700>] begin[:]
for taget[name[parent]] in starred[name[realm].higher_realms] begin[:]
variable[higher_realm] assign[=] call[name[self].find_by_name, parameter[name[parent]]]
if name[higher_realm] begin[:]
call[name[higher_realm].realm_members.append, parameter[call[name[realm].get_name, parameter[]]]]
for taget[name[realm]] in starred[name[self]] begin[:]
for taget[name[tmp_realm]] in starred[name[self]] begin[:]
name[tmp_realm].rec_tag assign[=] constant[False]
call[name[realm].get_realms_by_explosion, parameter[name[self]]]
for taget[name[tmp_realm]] in starred[name[self]] begin[:]
<ast.Delete object at 0x7da18f720cd0> | keyword[def] identifier[explode] ( identifier[self] ):
literal[string]
keyword[for] identifier[realm] keyword[in] [ identifier[tmp_realm] keyword[for] identifier[tmp_realm] keyword[in] identifier[self] keyword[if] identifier[tmp_realm] . identifier[higher_realms] ]:
keyword[for] identifier[parent] keyword[in] identifier[realm] . identifier[higher_realms] :
identifier[higher_realm] = identifier[self] . identifier[find_by_name] ( identifier[parent] )
keyword[if] identifier[higher_realm] :
identifier[higher_realm] . identifier[realm_members] . identifier[append] ( identifier[realm] . identifier[get_name] ())
keyword[for] identifier[realm] keyword[in] identifier[self] :
keyword[for] identifier[tmp_realm] keyword[in] identifier[self] :
identifier[tmp_realm] . identifier[rec_tag] = keyword[False]
identifier[realm] . identifier[get_realms_by_explosion] ( identifier[self] )
keyword[for] identifier[tmp_realm] keyword[in] identifier[self] :
keyword[del] identifier[tmp_realm] . identifier[rec_tag] | def explode(self):
"""Explode realms with each realm_members and higher_realms to get all the
realms sub realms.
:return: None
"""
# Manage higher realms where defined
for realm in [tmp_realm for tmp_realm in self if tmp_realm.higher_realms]:
for parent in realm.higher_realms:
higher_realm = self.find_by_name(parent)
if higher_realm:
# Add the realm to its parent realm members
higher_realm.realm_members.append(realm.get_name()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parent']] # depends on [control=['for'], data=['realm']]
for realm in self:
# Set a recursion tag to protect against loop
for tmp_realm in self:
tmp_realm.rec_tag = False # depends on [control=['for'], data=['tmp_realm']]
realm.get_realms_by_explosion(self) # depends on [control=['for'], data=['realm']]
# Clean the recursion tag
for tmp_realm in self:
del tmp_realm.rec_tag # depends on [control=['for'], data=['tmp_realm']] |
def write_branch_data(self, file):
""" Writes branch data as CSV.
"""
writer = self._get_writer(file)
writer.writerow(BRANCH_ATTRS)
for branch in self.case.branches:
writer.writerow([getattr(branch, a) for a in BRANCH_ATTRS]) | def function[write_branch_data, parameter[self, file]]:
constant[ Writes branch data as CSV.
]
variable[writer] assign[=] call[name[self]._get_writer, parameter[name[file]]]
call[name[writer].writerow, parameter[name[BRANCH_ATTRS]]]
for taget[name[branch]] in starred[name[self].case.branches] begin[:]
call[name[writer].writerow, parameter[<ast.ListComp object at 0x7da20c6aa920>]] | keyword[def] identifier[write_branch_data] ( identifier[self] , identifier[file] ):
literal[string]
identifier[writer] = identifier[self] . identifier[_get_writer] ( identifier[file] )
identifier[writer] . identifier[writerow] ( identifier[BRANCH_ATTRS] )
keyword[for] identifier[branch] keyword[in] identifier[self] . identifier[case] . identifier[branches] :
identifier[writer] . identifier[writerow] ([ identifier[getattr] ( identifier[branch] , identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[BRANCH_ATTRS] ]) | def write_branch_data(self, file):
""" Writes branch data as CSV.
"""
writer = self._get_writer(file)
writer.writerow(BRANCH_ATTRS)
for branch in self.case.branches:
writer.writerow([getattr(branch, a) for a in BRANCH_ATTRS]) # depends on [control=['for'], data=['branch']] |
def dbStore(self, typ, py_value):
"""
Converts the value to one that is safe to store on a record within
the record values dictionary
:param value | <variant>
:return <variant>
"""
if isinstance(py_value, datetime.datetime):
return time.mktime(py_value.timetuple())
else:
return super(UTC_TimestampColumn, self).dbStore(typ, py_value) | def function[dbStore, parameter[self, typ, py_value]]:
constant[
Converts the value to one that is safe to store on a record within
the record values dictionary
:param value | <variant>
:return <variant>
]
if call[name[isinstance], parameter[name[py_value], name[datetime].datetime]] begin[:]
return[call[name[time].mktime, parameter[call[name[py_value].timetuple, parameter[]]]]] | keyword[def] identifier[dbStore] ( identifier[self] , identifier[typ] , identifier[py_value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[py_value] , identifier[datetime] . identifier[datetime] ):
keyword[return] identifier[time] . identifier[mktime] ( identifier[py_value] . identifier[timetuple] ())
keyword[else] :
keyword[return] identifier[super] ( identifier[UTC_TimestampColumn] , identifier[self] ). identifier[dbStore] ( identifier[typ] , identifier[py_value] ) | def dbStore(self, typ, py_value):
"""
Converts the value to one that is safe to store on a record within
the record values dictionary
:param value | <variant>
:return <variant>
"""
if isinstance(py_value, datetime.datetime):
return time.mktime(py_value.timetuple()) # depends on [control=['if'], data=[]]
else:
return super(UTC_TimestampColumn, self).dbStore(typ, py_value) |
def register(self, ModelClass, form_field=None, widget=None, title=None, prefix=None, has_id_value=True):
"""
Register a custom model with the ``AnyUrlField``.
"""
if any(urltype.model == ModelClass for urltype in self._url_types):
raise ValueError("Model is already registered: '{0}'".format(ModelClass))
opts = ModelClass._meta
opts = opts.concrete_model._meta
if not prefix:
# Store something descriptive, easier to lookup from raw database content.
prefix = '{0}.{1}'.format(opts.app_label, opts.object_name.lower())
if not title:
title = ModelClass._meta.verbose_name
if self.is_external_url_prefix(prefix):
raise ValueError("Invalid prefix value: '{0}'.".format(prefix))
if self[prefix] is not None:
raise ValueError("Prefix is already registered: '{0}'".format(prefix))
if form_field is not None and widget is not None:
raise ValueError("Provide either a form_field or widget; use the widget parameter of the form field instead.")
urltype = UrlType(ModelClass, form_field, widget, title, prefix, has_id_value)
signals.post_save.connect(_on_model_save, sender=ModelClass)
self._url_types.append(urltype)
return urltype | def function[register, parameter[self, ModelClass, form_field, widget, title, prefix, has_id_value]]:
constant[
Register a custom model with the ``AnyUrlField``.
]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18eb556c0>]] begin[:]
<ast.Raise object at 0x7da18eb54460>
variable[opts] assign[=] name[ModelClass]._meta
variable[opts] assign[=] name[opts].concrete_model._meta
if <ast.UnaryOp object at 0x7da18eb54490> begin[:]
variable[prefix] assign[=] call[constant[{0}.{1}].format, parameter[name[opts].app_label, call[name[opts].object_name.lower, parameter[]]]]
if <ast.UnaryOp object at 0x7da18eb551b0> begin[:]
variable[title] assign[=] name[ModelClass]._meta.verbose_name
if call[name[self].is_external_url_prefix, parameter[name[prefix]]] begin[:]
<ast.Raise object at 0x7da2045659c0>
if compare[call[name[self]][name[prefix]] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da2045671f0>
if <ast.BoolOp object at 0x7da2043447c0> begin[:]
<ast.Raise object at 0x7da2043457e0>
variable[urltype] assign[=] call[name[UrlType], parameter[name[ModelClass], name[form_field], name[widget], name[title], name[prefix], name[has_id_value]]]
call[name[signals].post_save.connect, parameter[name[_on_model_save]]]
call[name[self]._url_types.append, parameter[name[urltype]]]
return[name[urltype]] | keyword[def] identifier[register] ( identifier[self] , identifier[ModelClass] , identifier[form_field] = keyword[None] , identifier[widget] = keyword[None] , identifier[title] = keyword[None] , identifier[prefix] = keyword[None] , identifier[has_id_value] = keyword[True] ):
literal[string]
keyword[if] identifier[any] ( identifier[urltype] . identifier[model] == identifier[ModelClass] keyword[for] identifier[urltype] keyword[in] identifier[self] . identifier[_url_types] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[ModelClass] ))
identifier[opts] = identifier[ModelClass] . identifier[_meta]
identifier[opts] = identifier[opts] . identifier[concrete_model] . identifier[_meta]
keyword[if] keyword[not] identifier[prefix] :
identifier[prefix] = literal[string] . identifier[format] ( identifier[opts] . identifier[app_label] , identifier[opts] . identifier[object_name] . identifier[lower] ())
keyword[if] keyword[not] identifier[title] :
identifier[title] = identifier[ModelClass] . identifier[_meta] . identifier[verbose_name]
keyword[if] identifier[self] . identifier[is_external_url_prefix] ( identifier[prefix] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[prefix] ))
keyword[if] identifier[self] [ identifier[prefix] ] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[prefix] ))
keyword[if] identifier[form_field] keyword[is] keyword[not] keyword[None] keyword[and] identifier[widget] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[urltype] = identifier[UrlType] ( identifier[ModelClass] , identifier[form_field] , identifier[widget] , identifier[title] , identifier[prefix] , identifier[has_id_value] )
identifier[signals] . identifier[post_save] . identifier[connect] ( identifier[_on_model_save] , identifier[sender] = identifier[ModelClass] )
identifier[self] . identifier[_url_types] . identifier[append] ( identifier[urltype] )
keyword[return] identifier[urltype] | def register(self, ModelClass, form_field=None, widget=None, title=None, prefix=None, has_id_value=True):
"""
Register a custom model with the ``AnyUrlField``.
"""
if any((urltype.model == ModelClass for urltype in self._url_types)):
raise ValueError("Model is already registered: '{0}'".format(ModelClass)) # depends on [control=['if'], data=[]]
opts = ModelClass._meta
opts = opts.concrete_model._meta
if not prefix:
# Store something descriptive, easier to lookup from raw database content.
prefix = '{0}.{1}'.format(opts.app_label, opts.object_name.lower()) # depends on [control=['if'], data=[]]
if not title:
title = ModelClass._meta.verbose_name # depends on [control=['if'], data=[]]
if self.is_external_url_prefix(prefix):
raise ValueError("Invalid prefix value: '{0}'.".format(prefix)) # depends on [control=['if'], data=[]]
if self[prefix] is not None:
raise ValueError("Prefix is already registered: '{0}'".format(prefix)) # depends on [control=['if'], data=[]]
if form_field is not None and widget is not None:
raise ValueError('Provide either a form_field or widget; use the widget parameter of the form field instead.') # depends on [control=['if'], data=[]]
urltype = UrlType(ModelClass, form_field, widget, title, prefix, has_id_value)
signals.post_save.connect(_on_model_save, sender=ModelClass)
self._url_types.append(urltype)
return urltype |
def conversations(self, getsrcdst=None, **kargs):
"""Graphes a conversations between sources and destinations and display it
(using graphviz and imagemagick)
getsrcdst: a function that takes an element of the list and
returns the source, the destination and optionally
a label. By default, returns the IP source and
destination from IP and ARP layers
type: output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option # noqa: E501
target: filename or redirect. Defaults pipe to Imagemagick's display program # noqa: E501
prog: which graphviz program to use"""
if getsrcdst is None:
def getsrcdst(pkt):
"""Extract src and dst addresses"""
if 'IP' in pkt:
return (pkt['IP'].src, pkt['IP'].dst)
if 'IPv6' in pkt:
return (pkt['IPv6'].src, pkt['IPv6'].dst)
if 'ARP' in pkt:
return (pkt['ARP'].psrc, pkt['ARP'].pdst)
raise TypeError()
conv = {}
for p in self.res:
p = self._elt2pkt(p)
try:
c = getsrcdst(p)
except Exception:
# No warning here: it's OK that getsrcdst() raises an
# exception, since it might be, for example, a
# function that expects a specific layer in each
# packet. The try/except approach is faster and
# considered more Pythonic than adding tests.
continue
if len(c) == 3:
conv.setdefault(c[:2], set()).add(c[2])
else:
conv[c] = conv.get(c, 0) + 1
gr = 'digraph "conv" {\n'
for (s, d), l in six.iteritems(conv):
gr += '\t "%s" -> "%s" [label="%s"]\n' % (
s, d, ', '.join(str(x) for x in l) if isinstance(l, set) else l
)
gr += "}\n"
return do_graph(gr, **kargs) | def function[conversations, parameter[self, getsrcdst]]:
constant[Graphes a conversations between sources and destinations and display it
(using graphviz and imagemagick)
getsrcdst: a function that takes an element of the list and
returns the source, the destination and optionally
a label. By default, returns the IP source and
destination from IP and ARP layers
type: output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option # noqa: E501
target: filename or redirect. Defaults pipe to Imagemagick's display program # noqa: E501
prog: which graphviz program to use]
if compare[name[getsrcdst] is constant[None]] begin[:]
def function[getsrcdst, parameter[pkt]]:
constant[Extract src and dst addresses]
if compare[constant[IP] in name[pkt]] begin[:]
return[tuple[[<ast.Attribute object at 0x7da1b215f520>, <ast.Attribute object at 0x7da1b215f1c0>]]]
if compare[constant[IPv6] in name[pkt]] begin[:]
return[tuple[[<ast.Attribute object at 0x7da1b215fe20>, <ast.Attribute object at 0x7da1b215fe80>]]]
if compare[constant[ARP] in name[pkt]] begin[:]
return[tuple[[<ast.Attribute object at 0x7da1b215f9d0>, <ast.Attribute object at 0x7da1b215fca0>]]]
<ast.Raise object at 0x7da1b215f3a0>
variable[conv] assign[=] dictionary[[], []]
for taget[name[p]] in starred[name[self].res] begin[:]
variable[p] assign[=] call[name[self]._elt2pkt, parameter[name[p]]]
<ast.Try object at 0x7da1b215f430>
if compare[call[name[len], parameter[name[c]]] equal[==] constant[3]] begin[:]
call[call[name[conv].setdefault, parameter[call[name[c]][<ast.Slice object at 0x7da1b215c190>], call[name[set], parameter[]]]].add, parameter[call[name[c]][constant[2]]]]
variable[gr] assign[=] constant[digraph "conv" {
]
for taget[tuple[[<ast.Tuple object at 0x7da1b215c550>, <ast.Name object at 0x7da1b215c490>]]] in starred[call[name[six].iteritems, parameter[name[conv]]]] begin[:]
<ast.AugAssign object at 0x7da1b215c7c0>
<ast.AugAssign object at 0x7da1b215c670>
return[call[name[do_graph], parameter[name[gr]]]] | keyword[def] identifier[conversations] ( identifier[self] , identifier[getsrcdst] = keyword[None] ,** identifier[kargs] ):
literal[string]
keyword[if] identifier[getsrcdst] keyword[is] keyword[None] :
keyword[def] identifier[getsrcdst] ( identifier[pkt] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[pkt] :
keyword[return] ( identifier[pkt] [ literal[string] ]. identifier[src] , identifier[pkt] [ literal[string] ]. identifier[dst] )
keyword[if] literal[string] keyword[in] identifier[pkt] :
keyword[return] ( identifier[pkt] [ literal[string] ]. identifier[src] , identifier[pkt] [ literal[string] ]. identifier[dst] )
keyword[if] literal[string] keyword[in] identifier[pkt] :
keyword[return] ( identifier[pkt] [ literal[string] ]. identifier[psrc] , identifier[pkt] [ literal[string] ]. identifier[pdst] )
keyword[raise] identifier[TypeError] ()
identifier[conv] ={}
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[res] :
identifier[p] = identifier[self] . identifier[_elt2pkt] ( identifier[p] )
keyword[try] :
identifier[c] = identifier[getsrcdst] ( identifier[p] )
keyword[except] identifier[Exception] :
keyword[continue]
keyword[if] identifier[len] ( identifier[c] )== literal[int] :
identifier[conv] . identifier[setdefault] ( identifier[c] [: literal[int] ], identifier[set] ()). identifier[add] ( identifier[c] [ literal[int] ])
keyword[else] :
identifier[conv] [ identifier[c] ]= identifier[conv] . identifier[get] ( identifier[c] , literal[int] )+ literal[int]
identifier[gr] = literal[string]
keyword[for] ( identifier[s] , identifier[d] ), identifier[l] keyword[in] identifier[six] . identifier[iteritems] ( identifier[conv] ):
identifier[gr] += literal[string] %(
identifier[s] , identifier[d] , literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[l] ) keyword[if] identifier[isinstance] ( identifier[l] , identifier[set] ) keyword[else] identifier[l]
)
identifier[gr] += literal[string]
keyword[return] identifier[do_graph] ( identifier[gr] ,** identifier[kargs] ) | def conversations(self, getsrcdst=None, **kargs):
"""Graphes a conversations between sources and destinations and display it
(using graphviz and imagemagick)
getsrcdst: a function that takes an element of the list and
returns the source, the destination and optionally
a label. By default, returns the IP source and
destination from IP and ARP layers
type: output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option # noqa: E501
target: filename or redirect. Defaults pipe to Imagemagick's display program # noqa: E501
prog: which graphviz program to use"""
if getsrcdst is None:
def getsrcdst(pkt):
"""Extract src and dst addresses"""
if 'IP' in pkt:
return (pkt['IP'].src, pkt['IP'].dst) # depends on [control=['if'], data=['pkt']]
if 'IPv6' in pkt:
return (pkt['IPv6'].src, pkt['IPv6'].dst) # depends on [control=['if'], data=['pkt']]
if 'ARP' in pkt:
return (pkt['ARP'].psrc, pkt['ARP'].pdst) # depends on [control=['if'], data=['pkt']]
raise TypeError() # depends on [control=['if'], data=[]]
conv = {}
for p in self.res:
p = self._elt2pkt(p)
try:
c = getsrcdst(p) # depends on [control=['try'], data=[]]
except Exception:
# No warning here: it's OK that getsrcdst() raises an
# exception, since it might be, for example, a
# function that expects a specific layer in each
# packet. The try/except approach is faster and
# considered more Pythonic than adding tests.
continue # depends on [control=['except'], data=[]]
if len(c) == 3:
conv.setdefault(c[:2], set()).add(c[2]) # depends on [control=['if'], data=[]]
else:
conv[c] = conv.get(c, 0) + 1 # depends on [control=['for'], data=['p']]
gr = 'digraph "conv" {\n'
for ((s, d), l) in six.iteritems(conv):
gr += '\t "%s" -> "%s" [label="%s"]\n' % (s, d, ', '.join((str(x) for x in l)) if isinstance(l, set) else l) # depends on [control=['for'], data=[]]
gr += '}\n'
return do_graph(gr, **kargs) |
def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier):
    """
    Restores a cluster from its snapshot
    :param cluster_identifier: unique identifier of a cluster
    :type cluster_identifier: str
    :param snapshot_identifier: unique identifier for a snapshot of a cluster
    :type snapshot_identifier: str
    """
    cluster = self.get_conn().restore_from_cluster_snapshot(
        ClusterIdentifier=cluster_identifier,
        SnapshotIdentifier=snapshot_identifier,
    )['Cluster']
    # Normalise an empty/falsy cluster payload to None for the caller.
    return cluster or None
constant[
Restores a cluster from its snapshot
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
]
variable[response] assign[=] call[call[name[self].get_conn, parameter[]].restore_from_cluster_snapshot, parameter[]]
return[<ast.IfExp object at 0x7da1b03f9f90>] | keyword[def] identifier[restore_from_cluster_snapshot] ( identifier[self] , identifier[cluster_identifier] , identifier[snapshot_identifier] ):
literal[string]
identifier[response] = identifier[self] . identifier[get_conn] (). identifier[restore_from_cluster_snapshot] (
identifier[ClusterIdentifier] = identifier[cluster_identifier] ,
identifier[SnapshotIdentifier] = identifier[snapshot_identifier]
)
keyword[return] identifier[response] [ literal[string] ] keyword[if] identifier[response] [ literal[string] ] keyword[else] keyword[None] | def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier):
"""
Restores a cluster from its snapshot
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
"""
response = self.get_conn().restore_from_cluster_snapshot(ClusterIdentifier=cluster_identifier, SnapshotIdentifier=snapshot_identifier)
return response['Cluster'] if response['Cluster'] else None |
def read_input_file(filename, sep='\t', noquote=False):
    """Reads a given inputfile (tab delimited) and returns a matrix
    (list of list).
    arg: filename, the complete path to the inputfile to read
    arg: sep, the column separator (defaults to a tab)
    arg: noquote, if True, strip all double-quote characters from each row
    If the file cannot be read, the error is logged (not raised) and the
    rows gathered so far (possibly an empty list) are returned.
    """
    output = []
    try:
        # A context manager guarantees the stream is closed even if
        # reading fails part-way through.
        with open(filename, 'r') as stream:
            for row in stream:
                row = row.strip()
                if noquote:
                    row = row.replace('"', '')
                output.append(row.split(sep))
    except IOError as err:  # pragma: no cover
        # Lazy %-style args avoid formatting when the level is disabled.
        LOG.info("Something wrong happened while reading the file %s",
                 filename)
        LOG.debug("ERROR: %s", err)
    return output
constant[Reads a given inputfile (tab delimited) and returns a matrix
(list of list).
arg: filename, the complete path to the inputfile to read
]
variable[output] assign[=] list[[]]
variable[stream] assign[=] constant[None]
<ast.Try object at 0x7da207f9b250>
return[name[output]] | keyword[def] identifier[read_input_file] ( identifier[filename] , identifier[sep] = literal[string] , identifier[noquote] = keyword[False] ):
literal[string]
identifier[output] =[]
identifier[stream] = keyword[None]
keyword[try] :
identifier[stream] = identifier[open] ( identifier[filename] , literal[string] )
keyword[for] identifier[row] keyword[in] identifier[stream] :
identifier[row] = identifier[row] . identifier[strip] ()
keyword[if] identifier[noquote] :
identifier[row] = identifier[row] . identifier[replace] ( literal[string] , literal[string] )
identifier[output] . identifier[append] ( identifier[row] . identifier[split] ( identifier[sep] ))
keyword[except] identifier[IOError] keyword[as] identifier[err] :
identifier[LOG] . identifier[info] ( literal[string]
% identifier[filename] )
identifier[LOG] . identifier[debug] ( literal[string] % identifier[err] )
keyword[finally] :
keyword[if] identifier[stream] :
identifier[stream] . identifier[close] ()
keyword[return] identifier[output] | def read_input_file(filename, sep='\t', noquote=False):
"""Reads a given inputfile (tab delimited) and returns a matrix
(list of list).
arg: filename, the complete path to the inputfile to read
"""
output = []
stream = None
try:
stream = open(filename, 'r')
for row in stream:
row = row.strip()
if noquote:
row = row.replace('"', '') # depends on [control=['if'], data=[]]
output.append(row.split(sep)) # depends on [control=['for'], data=['row']] # depends on [control=['try'], data=[]]
except IOError as err: # pragma: no cover
LOG.info('Something wrong happend while reading the file %s ' % filename)
LOG.debug('ERROR: %s' % err) # depends on [control=['except'], data=['err']]
finally:
if stream:
stream.close() # depends on [control=['if'], data=[]]
return output |
def obj_tpr(result, reference, connectivity=1):
    """
    The true positive rate of distinct binary object detection.
    Gives a percentage measure of how many distinct binary objects in
    ``result`` also exist in ``reference``; a partial overlap of at least
    one voxel counts as a detection.  When two distinct objects in
    ``result`` overlap a single object in ``reference``, only one of them
    is considered successfully detected.
    Parameters
    ----------
    result : array_like
        Input data containing objects. Can be any type but will be converted
        into binary: background where 0, object everywhere else.
    reference : array_like
        Input data containing objects. Can be any type but will be converted
        into binary: background where 0, object everywhere else.
    connectivity : int
        The neighbourhood/connectivity considered when determining what
        accounts for a distinct binary object. This value is passed to
        `scipy.ndimage.morphology.generate_binary_structure` and should
        usually be :math:`> 1`. If in doubt, leave it as it is.
    Returns
    -------
    tpr : float
        Score in the range :math:`[0, 1]`, where :math:`1` is ideal.
    Raises
    ------
    RuntimeError
        If the reference object is empty.
    See also
    --------
    :func:`obj_fpr`
    Notes
    -----
    This is not a real metric, as it is directed: whatever array is the
    reference should be passed second. A perfect score of :math:`1` says
    every distinct object of the reference also exists in the result, but
    reveals nothing about additional objects in the result
    (use :func:`obj_fpr` for this).
    """
    correspondences = __distinct_binary_object_correspondences(
        reference, result, connectivity)
    # Index 2 holds the number of distinct objects in `result`, index 4
    # the one-to-one mapping of detected objects.
    n_obj_result = correspondences[2]
    mapping = correspondences[4]
    return len(mapping) / float(n_obj_result)
constant[
The true positive rate of distinct binary object detection.
The true positive rates gives a percentage measure of how many distinct binary
objects in the first array also exists in the second array. A partial overlap
(of minimum one voxel) is here considered sufficient.
In cases where two distinct binary object in the first array overlaps with a single
distinct object in the second array, only one is considered to have been detected
successfully.
Parameters
----------
result : array_like
Input data containing objects. Can be any type but will be converted
into binary: background where 0, object everywhere else.
reference : array_like
Input data containing objects. Can be any type but will be converted
into binary: background where 0, object everywhere else.
connectivity : int
The neighbourhood/connectivity considered when determining what accounts
for a distinct binary object. This value is passed to
`scipy.ndimage.morphology.generate_binary_structure` and should usually be :math:`> 1`.
The decision on the connectivity is important, as it can influence the results
strongly. If in doubt, leave it as it is.
Returns
-------
tpr : float
A percentage measure of how many distinct binary objects in ``result`` also exists
in ``reference``. It has the range :math:`[0, 1]`, where a :math:`1` denotes an ideal score.
Raises
------
RuntimeError
If the reference object is empty.
See also
--------
:func:`obj_fpr`
Notes
-----
This is not a real metric, as it is directed. Whatever array is considered as
reference should be passed second. A perfect score of :math:`1` tells that all distinct
binary objects in the reference array also exist in the result array, but does not
reveal anything about additional binary objects in the result array
(use :func:`obj_fpr` for this).
Examples
--------
>>> arr2 = numpy.asarray([[1,0,0],[1,0,1],[0,0,1]])
>>> arr1 = numpy.asarray([[0,0,1],[1,0,1],[0,0,1]])
>>> arr2
array([[1, 0, 0],
[1, 0, 1],
[0, 0, 1]])
>>> arr1
array([[0, 0, 1],
[1, 0, 1],
[0, 0, 1]])
>>> obj_tpr(arr1, arr2)
1.0
>>> obj_tpr(arr2, arr1)
1.0
Example of directedness:
>>> arr2 = numpy.asarray([1,0,1,0,1])
>>> arr1 = numpy.asarray([1,0,1,0,0])
>>> obj_tpr(arr1, arr2)
0.6666666666666666
>>> obj_tpr(arr2, arr1)
1.0
Examples of multiple overlap treatment:
>>> arr2 = numpy.asarray([1,0,1,0,1,1,1])
>>> arr1 = numpy.asarray([1,1,1,0,1,0,1])
>>> obj_tpr(arr1, arr2)
0.6666666666666666
>>> obj_tpr(arr2, arr1)
0.6666666666666666
>>> arr2 = numpy.asarray([1,0,1,1,1,0,1])
>>> arr1 = numpy.asarray([1,1,1,0,1,1,1])
>>> obj_tpr(arr1, arr2)
0.6666666666666666
>>> obj_tpr(arr2, arr1)
1.0
>>> arr2 = numpy.asarray([[1,0,1,0,0],
[1,0,0,0,0],
[1,0,1,1,1],
[0,0,0,0,0],
[1,0,1,0,0]])
>>> arr1 = numpy.asarray([[1,1,1,0,0],
[0,0,0,0,0],
[1,1,1,0,1],
[0,0,0,0,0],
[1,1,1,0,0]])
>>> obj_tpr(arr1, arr2)
0.8
>>> obj_tpr(arr2, arr1)
1.0
]
<ast.Tuple object at 0x7da1b113e6e0> assign[=] call[name[__distinct_binary_object_correspondences], parameter[name[reference], name[result], name[connectivity]]]
return[binary_operation[call[name[len], parameter[name[mapping]]] / call[name[float], parameter[name[n_obj_result]]]]] | keyword[def] identifier[obj_tpr] ( identifier[result] , identifier[reference] , identifier[connectivity] = literal[int] ):
literal[string]
identifier[_] , identifier[_] , identifier[n_obj_result] , identifier[_] , identifier[mapping] = identifier[__distinct_binary_object_correspondences] ( identifier[reference] , identifier[result] , identifier[connectivity] )
keyword[return] identifier[len] ( identifier[mapping] )/ identifier[float] ( identifier[n_obj_result] ) | def obj_tpr(result, reference, connectivity=1):
"""
The true positive rate of distinct binary object detection.
The true positive rates gives a percentage measure of how many distinct binary
objects in the first array also exists in the second array. A partial overlap
(of minimum one voxel) is here considered sufficient.
In cases where two distinct binary object in the first array overlaps with a single
distinct object in the second array, only one is considered to have been detected
successfully.
Parameters
----------
result : array_like
Input data containing objects. Can be any type but will be converted
into binary: background where 0, object everywhere else.
reference : array_like
Input data containing objects. Can be any type but will be converted
into binary: background where 0, object everywhere else.
connectivity : int
The neighbourhood/connectivity considered when determining what accounts
for a distinct binary object. This value is passed to
`scipy.ndimage.morphology.generate_binary_structure` and should usually be :math:`> 1`.
The decision on the connectivity is important, as it can influence the results
strongly. If in doubt, leave it as it is.
Returns
-------
tpr : float
A percentage measure of how many distinct binary objects in ``result`` also exists
in ``reference``. It has the range :math:`[0, 1]`, where a :math:`1` denotes an ideal score.
Raises
------
RuntimeError
If the reference object is empty.
See also
--------
:func:`obj_fpr`
Notes
-----
This is not a real metric, as it is directed. Whatever array is considered as
reference should be passed second. A perfect score of :math:`1` tells that all distinct
binary objects in the reference array also exist in the result array, but does not
reveal anything about additional binary objects in the result array
(use :func:`obj_fpr` for this).
Examples
--------
>>> arr2 = numpy.asarray([[1,0,0],[1,0,1],[0,0,1]])
>>> arr1 = numpy.asarray([[0,0,1],[1,0,1],[0,0,1]])
>>> arr2
array([[1, 0, 0],
[1, 0, 1],
[0, 0, 1]])
>>> arr1
array([[0, 0, 1],
[1, 0, 1],
[0, 0, 1]])
>>> obj_tpr(arr1, arr2)
1.0
>>> obj_tpr(arr2, arr1)
1.0
Example of directedness:
>>> arr2 = numpy.asarray([1,0,1,0,1])
>>> arr1 = numpy.asarray([1,0,1,0,0])
>>> obj_tpr(arr1, arr2)
0.6666666666666666
>>> obj_tpr(arr2, arr1)
1.0
Examples of multiple overlap treatment:
>>> arr2 = numpy.asarray([1,0,1,0,1,1,1])
>>> arr1 = numpy.asarray([1,1,1,0,1,0,1])
>>> obj_tpr(arr1, arr2)
0.6666666666666666
>>> obj_tpr(arr2, arr1)
0.6666666666666666
>>> arr2 = numpy.asarray([1,0,1,1,1,0,1])
>>> arr1 = numpy.asarray([1,1,1,0,1,1,1])
>>> obj_tpr(arr1, arr2)
0.6666666666666666
>>> obj_tpr(arr2, arr1)
1.0
>>> arr2 = numpy.asarray([[1,0,1,0,0],
[1,0,0,0,0],
[1,0,1,1,1],
[0,0,0,0,0],
[1,0,1,0,0]])
>>> arr1 = numpy.asarray([[1,1,1,0,0],
[0,0,0,0,0],
[1,1,1,0,1],
[0,0,0,0,0],
[1,1,1,0,0]])
>>> obj_tpr(arr1, arr2)
0.8
>>> obj_tpr(arr2, arr1)
1.0
"""
(_, _, n_obj_result, _, mapping) = __distinct_binary_object_correspondences(reference, result, connectivity)
return len(mapping) / float(n_obj_result) |
def disable_service_flap_detection(self, service):
    """Disable flap detection for a service
    Format of the line that triggers function call::
        DISABLE_SERVICE_FLAP_DETECTION;<host_name>;<service_description>
    :param service: service to edit
    :type service: alignak.objects.service.Service
    :return: None
    """
    # Nothing to do when flap detection is already disabled.
    if not service.flap_detection_enabled:
        return
    service.modified_attributes |= \
        DICT_MODATTR["MODATTR_FLAP_DETECTION_ENABLED"].value
    service.flap_detection_enabled = False
    # If the service was in the middle of flapping, stop it now.
    if service.is_flapping:
        service.is_flapping = False
        service.flapping_changes = []
    self.send_an_element(service.get_update_status_brok())
constant[Disable flap detection for a service
Format of the line that triggers function call::
DISABLE_SERVICE_FLAP_DETECTION;<host_name>;<service_description>
:param service: service to edit
:type service: alignak.objects.service.Service
:return: None
]
if name[service].flap_detection_enabled begin[:]
<ast.AugAssign object at 0x7da18f58cac0>
name[service].flap_detection_enabled assign[=] constant[False]
if name[service].is_flapping begin[:]
name[service].is_flapping assign[=] constant[False]
name[service].flapping_changes assign[=] list[[]]
call[name[self].send_an_element, parameter[call[name[service].get_update_status_brok, parameter[]]]] | keyword[def] identifier[disable_service_flap_detection] ( identifier[self] , identifier[service] ):
literal[string]
keyword[if] identifier[service] . identifier[flap_detection_enabled] :
identifier[service] . identifier[modified_attributes] |= identifier[DICT_MODATTR] [ literal[string] ]. identifier[value]
identifier[service] . identifier[flap_detection_enabled] = keyword[False]
keyword[if] identifier[service] . identifier[is_flapping] :
identifier[service] . identifier[is_flapping] = keyword[False]
identifier[service] . identifier[flapping_changes] =[]
identifier[self] . identifier[send_an_element] ( identifier[service] . identifier[get_update_status_brok] ()) | def disable_service_flap_detection(self, service):
"""Disable flap detection for a service
Format of the line that triggers function call::
DISABLE_SERVICE_FLAP_DETECTION;<host_name>;<service_description>
:param service: service to edit
:type service: alignak.objects.service.Service
:return: None
"""
if service.flap_detection_enabled:
service.modified_attributes |= DICT_MODATTR['MODATTR_FLAP_DETECTION_ENABLED'].value
service.flap_detection_enabled = False
# Maybe the service was flapping, if so, stop flapping
if service.is_flapping:
service.is_flapping = False
service.flapping_changes = [] # depends on [control=['if'], data=[]]
self.send_an_element(service.get_update_status_brok()) # depends on [control=['if'], data=[]] |
def get(self, version, use_cache=True):
    """
    Get the filepath to the specified version (downloading it in the process if necessary)
    @type version: IpsMeta
    @param use_cache: Use cached version downloads if available
    @type use_cache: bool
    @rtype: str
    """
    self.log.info('Retrieving %s version %s', self.meta_name, version.version)
    cached_path = version.filepath
    if cached_path:
        if use_cache:
            return cached_path
        self.log.info('Ignoring cached %s version: %s', self.meta_name, version.version)
    elif not use_cache:
        self.log.info("We can't ignore the cache of a version that hasn't been downloaded yet")
    # Not cached, or the cache was deliberately bypassed: fetch it now.
    version.download()
    return version.filepath
constant[
Get the filepath to the specified version (downloading it in the process if necessary)
@type version: IpsMeta
@param use_cache: Use cached version downloads if available
@type use_cache: bool
@rtype: str
]
call[name[self].log.info, parameter[constant[Retrieving %s version %s], name[self].meta_name, name[version].version]]
if name[version].filepath begin[:]
if name[use_cache] begin[:]
return[name[version].filepath]
call[name[version].download, parameter[]]
return[name[version].filepath] | keyword[def] identifier[get] ( identifier[self] , identifier[version] , identifier[use_cache] = keyword[True] ):
literal[string]
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[meta_name] , identifier[version] . identifier[version] )
keyword[if] identifier[version] . identifier[filepath] :
keyword[if] identifier[use_cache] :
keyword[return] identifier[version] . identifier[filepath]
keyword[else] :
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[meta_name] , identifier[version] . identifier[version] )
keyword[elif] keyword[not] identifier[use_cache] :
identifier[self] . identifier[log] . identifier[info] ( literal[string] )
identifier[version] . identifier[download] ()
keyword[return] identifier[version] . identifier[filepath] | def get(self, version, use_cache=True):
"""
Get the filepath to the specified version (downloading it in the process if necessary)
@type version: IpsMeta
@param use_cache: Use cached version downloads if available
@type use_cache: bool
@rtype: str
"""
self.log.info('Retrieving %s version %s', self.meta_name, version.version)
if version.filepath:
if use_cache:
return version.filepath # depends on [control=['if'], data=[]]
else:
self.log.info('Ignoring cached %s version: %s', self.meta_name, version.version) # depends on [control=['if'], data=[]]
elif not use_cache:
self.log.info("We can't ignore the cache of a version that hasn't been downloaded yet") # depends on [control=['if'], data=[]]
version.download()
return version.filepath |
def _image_summary(self, tf_name, images, step=None):
    """
    Log a list of images as a single summary.
    Args:
        tf_name (str): base tag; image ``i`` is tagged ``"{tf_name}/{i}"``.
        images: iterable of 2-D arrays; ``img.shape[0]`` is used as the
            height and ``img.shape[1]`` as the width.
        step: unused; kept for interface compatibility.
    Returns:
        summary_pb2.Summary: a summary holding one value per input image.
    References:
        https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/04-utils/tensorboard/logger.py#L22
    Example:
        >>> tf_name = 'foo'
        >>> self = Logger(None, is_dummy=True)
        >>> images = [np.random.rand(10, 10), np.random.rand(10, 10)]
        >>> summary = self._image_summary(tf_name, images, step=None)
        >>> assert len(summary.value) == 2
        >>> assert summary.value[0].image.width == 10
    """
    img_summaries = []
    for i, img in enumerate(images):
        # PNG output is binary, so it must go into a BytesIO buffer;
        # the old StringIO-with-fallback dance could never work for
        # bytes and hid errors behind a bare except.
        buf = BytesIO()
        scipy.misc.toimage(img).save(buf, format="png")
        # Create an Image object
        img_sum = summary_pb2.Summary.Image(
            encoded_image_string=buf.getvalue(),
            height=img.shape[0],
            width=img.shape[1]
        )
        # Create a Summary value
        img_summaries.append(
            summary_pb2.Summary.Value(tag='{}/{}'.format(tf_name, i),
                                      image=img_sum))
    # Build the summary once from the collected values.  The previous
    # version also created a throwaway Summary here that referenced
    # `img_sum` after the loop, raising NameError when `images` was empty.
    return summary_pb2.Summary(value=img_summaries)
constant[
Log a list of images.
References:
https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/04-utils/tensorboard/logger.py#L22
Example:
>>> tf_name = 'foo'
>>> value = ([0, 1, 2, 3, 4, 5], [1, 20, 10, 22, 11])
>>> self = Logger(None, is_dummy=True)
>>> images = [np.random.rand(10, 10), np.random.rand(10, 10)]
>>> summary = self._image_summary(tf_name, images, step=None)
>>> assert len(summary.value) == 2
>>> assert summary.value[0].image.width == 10
]
variable[img_summaries] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2047eb460>, <ast.Name object at 0x7da2047eb4f0>]]] in starred[call[name[enumerate], parameter[name[images]]]] begin[:]
<ast.Try object at 0x7da2047e9ae0>
call[call[name[scipy].misc.toimage, parameter[name[img]]].save, parameter[name[s]]]
variable[img_sum] assign[=] call[name[summary_pb2].Summary.Image, parameter[]]
variable[img_value] assign[=] call[name[summary_pb2].Summary.Value, parameter[]]
call[name[img_summaries].append, parameter[name[img_value]]]
variable[summary] assign[=] call[name[summary_pb2].Summary, parameter[]]
call[name[summary].value.add, parameter[]]
variable[summary] assign[=] call[name[summary_pb2].Summary, parameter[]]
return[name[summary]] | keyword[def] identifier[_image_summary] ( identifier[self] , identifier[tf_name] , identifier[images] , identifier[step] = keyword[None] ):
literal[string]
identifier[img_summaries] =[]
keyword[for] identifier[i] , identifier[img] keyword[in] identifier[enumerate] ( identifier[images] ):
keyword[try] :
identifier[s] = identifier[StringIO] ()
keyword[except] :
identifier[s] = identifier[BytesIO] ()
identifier[scipy] . identifier[misc] . identifier[toimage] ( identifier[img] ). identifier[save] ( identifier[s] , identifier[format] = literal[string] )
identifier[img_sum] = identifier[summary_pb2] . identifier[Summary] . identifier[Image] (
identifier[encoded_image_string] = identifier[s] . identifier[getvalue] (),
identifier[height] = identifier[img] . identifier[shape] [ literal[int] ],
identifier[width] = identifier[img] . identifier[shape] [ literal[int] ]
)
identifier[img_value] = identifier[summary_pb2] . identifier[Summary] . identifier[Value] ( identifier[tag] = literal[string] . identifier[format] ( identifier[tf_name] , identifier[i] ),
identifier[image] = identifier[img_sum] )
identifier[img_summaries] . identifier[append] ( identifier[img_value] )
identifier[summary] = identifier[summary_pb2] . identifier[Summary] ()
identifier[summary] . identifier[value] . identifier[add] ( identifier[tag] = identifier[tf_name] , identifier[image] = identifier[img_sum] )
identifier[summary] = identifier[summary_pb2] . identifier[Summary] ( identifier[value] = identifier[img_summaries] )
keyword[return] identifier[summary] | def _image_summary(self, tf_name, images, step=None):
"""
Log a list of images.
References:
https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/04-utils/tensorboard/logger.py#L22
Example:
>>> tf_name = 'foo'
>>> value = ([0, 1, 2, 3, 4, 5], [1, 20, 10, 22, 11])
>>> self = Logger(None, is_dummy=True)
>>> images = [np.random.rand(10, 10), np.random.rand(10, 10)]
>>> summary = self._image_summary(tf_name, images, step=None)
>>> assert len(summary.value) == 2
>>> assert summary.value[0].image.width == 10
"""
img_summaries = []
for (i, img) in enumerate(images):
# Write the image to a string
try:
s = StringIO() # depends on [control=['try'], data=[]]
except:
s = BytesIO() # depends on [control=['except'], data=[]]
scipy.misc.toimage(img).save(s, format='png')
# Create an Image object
img_sum = summary_pb2.Summary.Image(encoded_image_string=s.getvalue(), height=img.shape[0], width=img.shape[1])
# Create a Summary value
img_value = summary_pb2.Summary.Value(tag='{}/{}'.format(tf_name, i), image=img_sum)
img_summaries.append(img_value)
summary = summary_pb2.Summary()
summary.value.add(tag=tf_name, image=img_sum) # depends on [control=['for'], data=[]]
summary = summary_pb2.Summary(value=img_summaries)
return summary |
def paragraph_generator(sentences=None):
    """Creates a generator for generating paragraphs.
    :arg sentences: list or tuple of sentences you want to use;
        defaults to LOREM
    :returns: generator
    Example::
        from eadred.helpers import paragraph_generator
        gen = paragraph_generator()
        for i in range(50):
            mymodel = SomeModel(description=gen.next())
            mymodel.save()
    """
    pool = LOREM if sentences is None else sentences
    while True:
        # Each paragraph is built from 1-7 randomly chosen sentences.
        sentence_count = random.randint(1, 7)
        yield u' '.join(random.choice(pool) for _ in range(sentence_count))
constant[Creates a generator for generating paragraphs.
:arg sentences: list or tuple of sentences you want to use;
defaults to LOREM
:returns: generator
Example::
from eadred.helpers import paragraph_generator
gen = paragraph_generator()
for i in range(50):
mymodel = SomeModel(description=gen.next())
mymodel.save()
]
if compare[name[sentences] is constant[None]] begin[:]
variable[sentences] assign[=] name[LOREM]
while constant[True] begin[:]
variable[paragraph] assign[=] <ast.ListComp object at 0x7da1b176a620>
<ast.Yield object at 0x7da1b17691e0> | keyword[def] identifier[paragraph_generator] ( identifier[sentences] = keyword[None] ):
literal[string]
keyword[if] identifier[sentences] keyword[is] keyword[None] :
identifier[sentences] = identifier[LOREM]
keyword[while] keyword[True] :
identifier[paragraph] =[ identifier[random] . identifier[choice] ( identifier[sentences] )
keyword[for] identifier[num] keyword[in] identifier[range] ( identifier[random] . identifier[randint] ( literal[int] , literal[int] ))]
keyword[yield] literal[string] . identifier[join] ( identifier[paragraph] ) | def paragraph_generator(sentences=None):
"""Creates a generator for generating paragraphs.
:arg sentences: list or tuple of sentences you want to use;
defaults to LOREM
:returns: generator
Example::
from eadred.helpers import paragraph_generator
gen = paragraph_generator()
for i in range(50):
mymodel = SomeModel(description=gen.next())
mymodel.save()
"""
if sentences is None:
sentences = LOREM # depends on [control=['if'], data=['sentences']]
while True:
# Paragraph consists of 1-7 sentences.
paragraph = [random.choice(sentences) for num in range(random.randint(1, 7))]
yield u' '.join(paragraph) # depends on [control=['while'], data=[]] |
def is_nested_list_like(obj):
    """
    Check if the object is list-like, and that all of its elements
    are also list-like.
    .. versionadded:: 0.20.0
    Parameters
    ----------
    obj : The object to check
    Returns
    -------
    is_list_like : bool
        Whether `obj` has list-like properties.
    Examples
    --------
    >>> is_nested_list_like([[1, 2, 3]])
    True
    >>> is_nested_list_like(["foo"])
    False
    >>> is_nested_list_like([])
    False
    >>> is_nested_list_like([[1, 2, 3], 1])
    False
    Notes
    -----
    This won't reliably detect whether a consumable iterator (e. g.
    a generator) is a nested-list-like without consuming the iterator.
    To avoid consuming it, we always return False if the outer container
    doesn't define `__len__`.
    See Also
    --------
    is_list_like
    """
    # The outer container must be list-like, sized, and non-empty ...
    if not is_list_like(obj) or not hasattr(obj, '__len__') or len(obj) == 0:
        return False
    # ... and every one of its elements must itself be list-like.
    return all(is_list_like(item) for item in obj)
constant[
Check if the object is list-like, and that all of its elements
are also list-like.
.. versionadded:: 0.20.0
Parameters
----------
obj : The object to check
Returns
-------
is_list_like : bool
Whether `obj` has list-like properties.
Examples
--------
>>> is_nested_list_like([[1, 2, 3]])
True
>>> is_nested_list_like([{1, 2, 3}, {1, 2, 3}])
True
>>> is_nested_list_like(["foo"])
False
>>> is_nested_list_like([])
False
>>> is_nested_list_like([[1, 2, 3], 1])
False
Notes
-----
This won't reliably detect whether a consumable iterator (e. g.
a generator) is a nested-list-like without consuming the iterator.
To avoid consuming it, we always return False if the outer container
doesn't define `__len__`.
See Also
--------
is_list_like
]
return[<ast.BoolOp object at 0x7da1b26ae620>] | keyword[def] identifier[is_nested_list_like] ( identifier[obj] ):
literal[string]
keyword[return] ( identifier[is_list_like] ( identifier[obj] ) keyword[and] identifier[hasattr] ( identifier[obj] , literal[string] ) keyword[and]
identifier[len] ( identifier[obj] )> literal[int] keyword[and] identifier[all] ( identifier[is_list_like] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[obj] )) | def is_nested_list_like(obj):
"""
Check if the object is list-like, and that all of its elements
are also list-like.
.. versionadded:: 0.20.0
Parameters
----------
obj : The object to check
Returns
-------
is_list_like : bool
Whether `obj` has list-like properties.
Examples
--------
>>> is_nested_list_like([[1, 2, 3]])
True
>>> is_nested_list_like([{1, 2, 3}, {1, 2, 3}])
True
>>> is_nested_list_like(["foo"])
False
>>> is_nested_list_like([])
False
>>> is_nested_list_like([[1, 2, 3], 1])
False
Notes
-----
This won't reliably detect whether a consumable iterator (e. g.
a generator) is a nested-list-like without consuming the iterator.
To avoid consuming it, we always return False if the outer container
doesn't define `__len__`.
See Also
--------
is_list_like
"""
return is_list_like(obj) and hasattr(obj, '__len__') and (len(obj) > 0) and all((is_list_like(item) for item in obj)) |
def load(*files):
    """
    Loads configuration from one or more files by merging right to left.
    :Parameters:
        *files : `file-like`
            A YAML file to read.
    :Returns:
        `dict` : the configuration document
    """
    if not files:
        raise errors.ConfigError("No config files provided.")
    documents = (yaml.safe_load(f) for f in files)
    return propagate_defaults(merge(*documents))
constant[
Loads configuration from one or more files by merging right to left.
:Parameters:
*files : `file-like`
A YAML file to read.
:Returns:
`dict` : the configuration document
]
if compare[call[name[len], parameter[name[files]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b0aba1a0>
variable[doc] assign[=] call[name[merge], parameter[<ast.Starred object at 0x7da1b0910610>]]
return[call[name[propagate_defaults], parameter[name[doc]]]] | keyword[def] identifier[load] (* identifier[files] ):
literal[string]
keyword[if] identifier[len] ( identifier[files] )== literal[int] :
keyword[raise] identifier[errors] . identifier[ConfigError] ( literal[string] )
identifier[doc] = identifier[merge] (*( identifier[yaml] . identifier[safe_load] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[files] ))
keyword[return] identifier[propagate_defaults] ( identifier[doc] ) | def load(*files):
"""
Loads configuration from one or more files by merging right to left.
:Parameters:
*files : `file-like`
A YAML file to read.
:Returns:
`dict` : the configuration document
"""
if len(files) == 0:
raise errors.ConfigError('No config files provided.') # depends on [control=['if'], data=[]]
doc = merge(*(yaml.safe_load(f) for f in files))
return propagate_defaults(doc) |
def export_dqdv(cell_data, savedir, sep, last_cycle=None):
    """Exports dQ/dV data from a CellpyData instance.

    Writes two csv files, ``<name>_dqdv_charge.csv`` and
    ``<name>_dqdv_discharge.csv``, next to the source file (or in
    ``savedir`` when given).

    Args:
        cell_data: CellpyData instance
        savedir: path to the folder where the files should be saved
        sep: separator for the .csv-files.
        last_cycle: only export up to this cycle (if not None)
    """
    logger.debug("exporting dqdv")
    filename = cell_data.dataset.loaded_from
    no_merged_sets = ""  # placeholder suffix, kept for filename compatibility
    firstname, extension = os.path.splitext(filename)
    firstname += no_merged_sets
    if savedir:
        firstname = os.path.join(savedir, os.path.basename(firstname))
        logger.debug(f"savedir is true: {firstname}")
    outname_charge = firstname + "_dqdv_charge.csv"
    outname_discharge = firstname + "_dqdv_discharge.csv"
    list_of_cycles = cell_data.get_cycle_numbers()
    number_of_cycles = len(list_of_cycles)
    logger.debug("%s: you have %i cycles" % (filename, number_of_cycles))

    def _extract_and_save(capacity_getter, outname, label):
        # One extract-and-save pass for either charge or discharge.
        # (Also fixes the old "extracxted" typo in the discharge log line.)
        out_data = _extract_dqdv(cell_data, capacity_getter, last_cycle)
        logger.debug("extracted ica for %s" % label)
        try:
            _save_multi(data=out_data, file_name=outname, sep=sep)
        except ExportFailed:
            logger.info("could not export ica for %s" % label)
        else:
            logger.debug("saved ica for %s" % label)

    _extract_and_save(cell_data.get_ccap, outname_charge, "charge")
    _extract_and_save(cell_data.get_dcap, outname_discharge, "discharge")
constant[Exports dQ/dV data from a CellpyData instance.
Args:
cell_data: CellpyData instance
savedir: path to the folder where the files should be saved
sep: separator for the .csv-files.
last_cycle: only export up to this cycle (if not None)
]
call[name[logger].debug, parameter[constant[exporting dqdv]]]
variable[filename] assign[=] name[cell_data].dataset.loaded_from
variable[no_merged_sets] assign[=] constant[]
<ast.Tuple object at 0x7da1b1b9e8c0> assign[=] call[name[os].path.splitext, parameter[name[filename]]]
<ast.AugAssign object at 0x7da1b196ad10>
if name[savedir] begin[:]
variable[firstname] assign[=] call[name[os].path.join, parameter[name[savedir], call[name[os].path.basename, parameter[name[firstname]]]]]
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b196bd90>]]
variable[outname_charge] assign[=] binary_operation[name[firstname] + constant[_dqdv_charge.csv]]
variable[outname_discharge] assign[=] binary_operation[name[firstname] + constant[_dqdv_discharge.csv]]
variable[list_of_cycles] assign[=] call[name[cell_data].get_cycle_numbers, parameter[]]
variable[number_of_cycles] assign[=] call[name[len], parameter[name[list_of_cycles]]]
call[name[logger].debug, parameter[binary_operation[constant[%s: you have %i cycles] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b196a5c0>, <ast.Name object at 0x7da1b196a680>]]]]]
variable[out_data] assign[=] call[name[_extract_dqdv], parameter[name[cell_data], name[cell_data].get_ccap, name[last_cycle]]]
call[name[logger].debug, parameter[constant[extracted ica for charge]]]
<ast.Try object at 0x7da1b196a350>
variable[out_data] assign[=] call[name[_extract_dqdv], parameter[name[cell_data], name[cell_data].get_dcap, name[last_cycle]]]
call[name[logger].debug, parameter[constant[extracxted ica for discharge]]]
<ast.Try object at 0x7da1b196a2c0> | keyword[def] identifier[export_dqdv] ( identifier[cell_data] , identifier[savedir] , identifier[sep] , identifier[last_cycle] = keyword[None] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[filename] = identifier[cell_data] . identifier[dataset] . identifier[loaded_from]
identifier[no_merged_sets] = literal[string]
identifier[firstname] , identifier[extension] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] )
identifier[firstname] += identifier[no_merged_sets]
keyword[if] identifier[savedir] :
identifier[firstname] = identifier[os] . identifier[path] . identifier[join] ( identifier[savedir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[firstname] ))
identifier[logger] . identifier[debug] ( literal[string] )
identifier[outname_charge] = identifier[firstname] + literal[string]
identifier[outname_discharge] = identifier[firstname] + literal[string]
identifier[list_of_cycles] = identifier[cell_data] . identifier[get_cycle_numbers] ()
identifier[number_of_cycles] = identifier[len] ( identifier[list_of_cycles] )
identifier[logger] . identifier[debug] ( literal[string] %( identifier[filename] , identifier[number_of_cycles] ))
identifier[out_data] = identifier[_extract_dqdv] ( identifier[cell_data] , identifier[cell_data] . identifier[get_ccap] , identifier[last_cycle] )
identifier[logger] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[_save_multi] ( identifier[data] = identifier[out_data] , identifier[file_name] = identifier[outname_charge] , identifier[sep] = identifier[sep] )
keyword[except] identifier[ExportFailed] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[out_data] = identifier[_extract_dqdv] ( identifier[cell_data] , identifier[cell_data] . identifier[get_dcap] , identifier[last_cycle] )
identifier[logger] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[_save_multi] ( identifier[data] = identifier[out_data] , identifier[file_name] = identifier[outname_discharge] , identifier[sep] = identifier[sep] )
keyword[except] identifier[ExportFailed] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] ) | def export_dqdv(cell_data, savedir, sep, last_cycle=None):
"""Exports dQ/dV data from a CellpyData instance.
Args:
cell_data: CellpyData instance
savedir: path to the folder where the files should be saved
sep: separator for the .csv-files.
last_cycle: only export up to this cycle (if not None)
"""
logger.debug('exporting dqdv')
filename = cell_data.dataset.loaded_from
no_merged_sets = ''
(firstname, extension) = os.path.splitext(filename)
firstname += no_merged_sets
if savedir:
firstname = os.path.join(savedir, os.path.basename(firstname))
logger.debug(f'savedir is true: {firstname}') # depends on [control=['if'], data=[]]
outname_charge = firstname + '_dqdv_charge.csv'
outname_discharge = firstname + '_dqdv_discharge.csv'
list_of_cycles = cell_data.get_cycle_numbers()
number_of_cycles = len(list_of_cycles)
logger.debug('%s: you have %i cycles' % (filename, number_of_cycles))
# extracting charge
out_data = _extract_dqdv(cell_data, cell_data.get_ccap, last_cycle)
logger.debug('extracted ica for charge')
try:
_save_multi(data=out_data, file_name=outname_charge, sep=sep) # depends on [control=['try'], data=[]]
except ExportFailed:
logger.info('could not export ica for charge') # depends on [control=['except'], data=[]]
else:
logger.debug('saved ica for charge')
# extracting discharge
out_data = _extract_dqdv(cell_data, cell_data.get_dcap, last_cycle)
logger.debug('extracxted ica for discharge')
try:
_save_multi(data=out_data, file_name=outname_discharge, sep=sep) # depends on [control=['try'], data=[]]
except ExportFailed:
logger.info('could not export ica for discharge') # depends on [control=['except'], data=[]]
else:
logger.debug('saved ica for discharge') |
def user_quantity_remaining(self, user, filtered=True):
    '''Return the quantity remaining under the stock limit, or 0 when the
    pre-filter (e.g. a date-range restriction) excludes this condition.

    The queryset filter for this condition must annotate rows with a
    "remainder" attribute for the lookup below to work.
    '''
    condition = self.condition
    # A previously filtered condition already carries the annotation.
    if filtered and hasattr(condition, "remainder"):
        return condition.remainder
    # Otherwise re-query so the pre-filter can annotate it with "remainder".
    queryset = type(condition).objects.filter(pk=condition.id)
    queryset = self.pre_filter(queryset, user)
    return queryset[0].remainder if len(queryset) > 0 else 0
constant[ returns 0 if the date range is violated, otherwise, it will return
the quantity remaining under the stock limit.
The filter for this condition must add an annotation called "remainder"
in order for this to work.
]
if name[filtered] begin[:]
if call[name[hasattr], parameter[name[self].condition, constant[remainder]]] begin[:]
return[name[self].condition.remainder]
variable[qs] assign[=] call[call[name[type], parameter[name[self].condition]].objects.filter, parameter[]]
variable[qs] assign[=] call[name[self].pre_filter, parameter[name[qs], name[user]]]
if compare[call[name[len], parameter[name[qs]]] greater[>] constant[0]] begin[:]
return[call[name[qs]][constant[0]].remainder] | keyword[def] identifier[user_quantity_remaining] ( identifier[self] , identifier[user] , identifier[filtered] = keyword[True] ):
literal[string]
keyword[if] identifier[filtered] :
keyword[if] identifier[hasattr] ( identifier[self] . identifier[condition] , literal[string] ):
keyword[return] identifier[self] . identifier[condition] . identifier[remainder]
identifier[qs] = identifier[type] ( identifier[self] . identifier[condition] ). identifier[objects] . identifier[filter] ( identifier[pk] = identifier[self] . identifier[condition] . identifier[id] )
identifier[qs] = identifier[self] . identifier[pre_filter] ( identifier[qs] , identifier[user] )
keyword[if] identifier[len] ( identifier[qs] )> literal[int] :
keyword[return] identifier[qs] [ literal[int] ]. identifier[remainder]
keyword[else] :
keyword[return] literal[int] | def user_quantity_remaining(self, user, filtered=True):
""" returns 0 if the date range is violated, otherwise, it will return
the quantity remaining under the stock limit.
The filter for this condition must add an annotation called "remainder"
in order for this to work.
"""
if filtered:
if hasattr(self.condition, 'remainder'):
return self.condition.remainder # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Mark self.condition with a remainder
qs = type(self.condition).objects.filter(pk=self.condition.id)
qs = self.pre_filter(qs, user)
if len(qs) > 0:
return qs[0].remainder # depends on [control=['if'], data=[]]
else:
return 0 |
def get_step(self, grad):
    """Compute the 'step' for the next gradient descent update (AdaGrad).

    The step is returned instead of being applied so that parameters can
    be updated in place rather than overwritten.

    Examples
    --------
    >>> gradient = # ...
    >>> optimizer = AdaGradOptimizer(0.01)
    >>> params -= optimizer.get_step(gradient)

    Parameters
    ----------
    grad : np.array

    Returns
    -------
    np.array
        Size matches `grad`.
    """
    momentum = self._momentum
    if momentum is None:
        # First call: seed the squared-gradient accumulator.
        momentum = np.ones_like(grad) * self.initial_accumulator_value
    momentum += grad ** 2
    self._momentum = momentum
    return self.learning_rate * grad / np.sqrt(momentum)
constant[Computes the 'step' to take for the next gradient descent update.
Returns the step rather than performing the update so that
parameters can be updated in place rather than overwritten.
Examples
--------
>>> gradient = # ...
>>> optimizer = AdaGradOptimizer(0.01)
>>> params -= optimizer.get_step(gradient)
Parameters
----------
grad
Returns
-------
np.array
Size matches `grad`.
]
if compare[name[self]._momentum is constant[None]] begin[:]
name[self]._momentum assign[=] binary_operation[name[self].initial_accumulator_value * call[name[np].ones_like, parameter[name[grad]]]]
<ast.AugAssign object at 0x7da18ede4250>
return[binary_operation[binary_operation[name[self].learning_rate * name[grad]] / call[name[np].sqrt, parameter[name[self]._momentum]]]] | keyword[def] identifier[get_step] ( identifier[self] , identifier[grad] ):
literal[string]
keyword[if] identifier[self] . identifier[_momentum] keyword[is] keyword[None] :
identifier[self] . identifier[_momentum] = identifier[self] . identifier[initial_accumulator_value] * identifier[np] . identifier[ones_like] ( identifier[grad] )
identifier[self] . identifier[_momentum] += identifier[grad] ** literal[int]
keyword[return] identifier[self] . identifier[learning_rate] * identifier[grad] / identifier[np] . identifier[sqrt] ( identifier[self] . identifier[_momentum] ) | def get_step(self, grad):
"""Computes the 'step' to take for the next gradient descent update.
Returns the step rather than performing the update so that
parameters can be updated in place rather than overwritten.
Examples
--------
>>> gradient = # ...
>>> optimizer = AdaGradOptimizer(0.01)
>>> params -= optimizer.get_step(gradient)
Parameters
----------
grad
Returns
-------
np.array
Size matches `grad`.
"""
if self._momentum is None:
self._momentum = self.initial_accumulator_value * np.ones_like(grad) # depends on [control=['if'], data=[]]
self._momentum += grad ** 2
return self.learning_rate * grad / np.sqrt(self._momentum) |
def calc_dmgrid(d, maxloss=0.05, dt=3000., mindm=0., maxdm=0.):
    """Calculate the grid of DM trial values for a maximum sensitivity loss.

    Args:
        d: metadata dict with 'inttime' (integration time in seconds) and
            'freq' (array of channel center frequencies in GHz).
        maxloss: sensitivity loss tolerated by each dm bin width.
        dt: assumed pulse width in microsec.
        mindm: lowest DM value of the grid.
        maxdm: highest DM value of the grid; 0 short-circuits to ``[0]``.

    Returns:
        list of DM values (``[0]`` when maxdm is 0).
    """
    if maxdm == 0:
        return [0]
    # parameters
    tsamp = d['inttime']*1e6  # integration time in microsec
    k = 8.3  # dispersion constant for these units
    freq = d['freq'].mean()  # central (mean) frequency in GHz
    bw = 1e3*(d['freq'][-1] - d['freq'][0])  # total bandwidth in MHz
    ch = 1e3*(d['freq'][1] - d['freq'][0])  # channel width in MHz

    # width functions and loss factor (defs instead of assigned lambdas;
    # the unused Cordes loss formula was removed)
    def dt0(dm):
        # effective pulse width with intra-channel smearing only
        return n.sqrt(dt**2 + tsamp**2 + ((k*dm*ch)/(freq**3))**2)

    def dt1(dm, ddm):
        # effective width including smearing from a DM offset ddm over the band
        return n.sqrt(dt**2 + tsamp**2 + ((k*dm*ch)/(freq**3))**2
                      + ((k*ddm*bw)/(freq**3.))**2)

    def loss(dm, ddm):
        # fractional sensitivity loss from dedispersing at dm with error ddm
        return 1 - n.sqrt(dt0(dm)/dt1(dm, ddm))

    # iterate over dmgrid to find optimal dm values. go higher than maxdm
    # to be sure final list includes full range.
    dmgrid = n.arange(mindm, maxdm, 0.05)
    dmgrid_final = [dmgrid[0]]
    for dm in dmgrid:
        ddm = (dm - dmgrid_final[-1])/2.
        if loss(dm, ddm) > maxloss:
            dmgrid_final.append(dm)
    return dmgrid_final
constant[ Function to calculate the DM values for a given maximum sensitivity loss.
maxloss is sensitivity loss tolerated by dm bin width. dt is assumed pulse width in microsec.
]
variable[tsamp] assign[=] binary_operation[call[name[d]][constant[inttime]] * constant[1000000.0]]
variable[k] assign[=] constant[8.3]
variable[freq] assign[=] call[call[name[d]][constant[freq]].mean, parameter[]]
variable[bw] assign[=] binary_operation[constant[1000.0] * binary_operation[call[call[name[d]][constant[freq]]][<ast.UnaryOp object at 0x7da1b248c880>] - call[call[name[d]][constant[freq]]][constant[0]]]]
variable[ch] assign[=] binary_operation[constant[1000.0] * binary_operation[call[call[name[d]][constant[freq]]][constant[1]] - call[call[name[d]][constant[freq]]][constant[0]]]]
variable[dt0] assign[=] <ast.Lambda object at 0x7da1b248c0d0>
variable[dt1] assign[=] <ast.Lambda object at 0x7da1b248dde0>
variable[loss] assign[=] <ast.Lambda object at 0x7da1b248e140>
variable[loss_cordes] assign[=] <ast.Lambda object at 0x7da1b248e770>
if compare[name[maxdm] equal[==] constant[0]] begin[:]
return[list[[<ast.Constant object at 0x7da1b248f040>]]]
return[name[dmgrid_final]] | keyword[def] identifier[calc_dmgrid] ( identifier[d] , identifier[maxloss] = literal[int] , identifier[dt] = literal[int] , identifier[mindm] = literal[int] , identifier[maxdm] = literal[int] ):
literal[string]
identifier[tsamp] = identifier[d] [ literal[string] ]* literal[int]
identifier[k] = literal[int]
identifier[freq] = identifier[d] [ literal[string] ]. identifier[mean] ()
identifier[bw] = literal[int] *( identifier[d] [ literal[string] ][- literal[int] ]- identifier[d] [ literal[string] ][ literal[int] ])
identifier[ch] = literal[int] *( identifier[d] [ literal[string] ][ literal[int] ]- identifier[d] [ literal[string] ][ literal[int] ])
identifier[dt0] = keyword[lambda] identifier[dm] : identifier[n] . identifier[sqrt] ( identifier[dt] ** literal[int] + identifier[tsamp] ** literal[int] +(( identifier[k] * identifier[dm] * identifier[ch] )/( identifier[freq] ** literal[int] ))** literal[int] )
identifier[dt1] = keyword[lambda] identifier[dm] , identifier[ddm] : identifier[n] . identifier[sqrt] ( identifier[dt] ** literal[int] + identifier[tsamp] ** literal[int] +(( identifier[k] * identifier[dm] * identifier[ch] )/( identifier[freq] ** literal[int] ))** literal[int] +(( identifier[k] * identifier[ddm] * identifier[bw] )/( identifier[freq] ** literal[int] ))** literal[int] )
identifier[loss] = keyword[lambda] identifier[dm] , identifier[ddm] : literal[int] - identifier[n] . identifier[sqrt] ( identifier[dt0] ( identifier[dm] )/ identifier[dt1] ( identifier[dm] , identifier[ddm] ))
identifier[loss_cordes] = keyword[lambda] identifier[ddm] , identifier[dfreq] , identifier[dt] , identifier[freq] : literal[int] -( identifier[n] . identifier[sqrt] ( identifier[n] . identifier[pi] )/( literal[int] * literal[int] * identifier[ddm] * identifier[dfreq] /( identifier[dt] * identifier[freq] ** literal[int] )))* identifier[erf] ( literal[int] * identifier[ddm] * identifier[dfreq] /( identifier[dt] * identifier[freq] ** literal[int] ))
keyword[if] identifier[maxdm] == literal[int] :
keyword[return] [ literal[int] ]
keyword[else] :
identifier[dmgrid] = identifier[n] . identifier[arange] ( identifier[mindm] , identifier[maxdm] , literal[int] )
identifier[dmgrid_final] =[ identifier[dmgrid] [ literal[int] ]]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[dmgrid] )):
identifier[ddm] =( identifier[dmgrid] [ identifier[i] ]- identifier[dmgrid_final] [- literal[int] ])/ literal[int]
identifier[ll] = identifier[loss] ( identifier[dmgrid] [ identifier[i] ], identifier[ddm] )
keyword[if] identifier[ll] > identifier[maxloss] :
identifier[dmgrid_final] . identifier[append] ( identifier[dmgrid] [ identifier[i] ])
keyword[return] identifier[dmgrid_final] | def calc_dmgrid(d, maxloss=0.05, dt=3000.0, mindm=0.0, maxdm=0.0):
""" Function to calculate the DM values for a given maximum sensitivity loss.
maxloss is sensitivity loss tolerated by dm bin width. dt is assumed pulse width in microsec.
"""
# parameters
tsamp = d['inttime'] * 1000000.0 # in microsec
k = 8.3
freq = d['freq'].mean() # central (mean) frequency in GHz
bw = 1000.0 * (d['freq'][-1] - d['freq'][0])
ch = 1000.0 * (d['freq'][1] - d['freq'][0]) # channel width in MHz
# width functions and loss factor
dt0 = lambda dm: n.sqrt(dt ** 2 + tsamp ** 2 + (k * dm * ch / freq ** 3) ** 2)
dt1 = lambda dm, ddm: n.sqrt(dt ** 2 + tsamp ** 2 + (k * dm * ch / freq ** 3) ** 2 + (k * ddm * bw / freq ** 3.0) ** 2)
loss = lambda dm, ddm: 1 - n.sqrt(dt0(dm) / dt1(dm, ddm))
loss_cordes = lambda ddm, dfreq, dt, freq: 1 - n.sqrt(n.pi) / (2 * 0.00691 * ddm * dfreq / (dt * freq ** 3)) * erf(0.00691 * ddm * dfreq / (dt * freq ** 3)) # not quite right for underresolved pulses
if maxdm == 0:
return [0] # depends on [control=['if'], data=[]]
else:
# iterate over dmgrid to find optimal dm values. go higher than maxdm to be sure final list includes full range.
dmgrid = n.arange(mindm, maxdm, 0.05)
dmgrid_final = [dmgrid[0]]
for i in range(len(dmgrid)):
ddm = (dmgrid[i] - dmgrid_final[-1]) / 2.0
ll = loss(dmgrid[i], ddm)
if ll > maxloss:
dmgrid_final.append(dmgrid[i]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return dmgrid_final |
def render_urchin(self, ctx, data):
    """
    Fill in the Google Analytics (urchin) tracking key for this site, or
    render nothing if no key has been configured.
    """
    key = APIKey.getKeyForAPI(self._siteStore(), APIKey.URCHIN)
    if key is not None:
        return ctx.tag.fillSlots('urchin-key', key.apiKey)
    return ''
constant[
Render the code for recording Google Analytics statistics, if so
configured.
]
variable[key] assign[=] call[name[APIKey].getKeyForAPI, parameter[call[name[self]._siteStore, parameter[]], name[APIKey].URCHIN]]
if compare[name[key] is constant[None]] begin[:]
return[constant[]]
return[call[name[ctx].tag.fillSlots, parameter[constant[urchin-key], name[key].apiKey]]] | keyword[def] identifier[render_urchin] ( identifier[self] , identifier[ctx] , identifier[data] ):
literal[string]
identifier[key] = identifier[APIKey] . identifier[getKeyForAPI] ( identifier[self] . identifier[_siteStore] (), identifier[APIKey] . identifier[URCHIN] )
keyword[if] identifier[key] keyword[is] keyword[None] :
keyword[return] literal[string]
keyword[return] identifier[ctx] . identifier[tag] . identifier[fillSlots] ( literal[string] , identifier[key] . identifier[apiKey] ) | def render_urchin(self, ctx, data):
"""
Render the code for recording Google Analytics statistics, if so
configured.
"""
key = APIKey.getKeyForAPI(self._siteStore(), APIKey.URCHIN)
if key is None:
return '' # depends on [control=['if'], data=[]]
return ctx.tag.fillSlots('urchin-key', key.apiKey) |
def recursive_refs(envs, name):
    """
    Return set of recursive refs for given env name
    >>> local_refs = sorted(recursive_refs([
    ...     {'name': 'base', 'refs': []},
    ...     {'name': 'test', 'refs': ['base']},
    ...     {'name': 'local', 'refs': ['test']},
    ... ], 'local'))
    >>> local_refs == ['base', 'test']
    True
    """
    # Build the name -> refs lookup once; the original rebuilt it on every
    # recursive call (O(len(envs)) per ref), and its empty-refs branch was
    # redundant since a union over no refs is simply the empty set.
    refs_by_name = {
        env['name']: set(env['refs'])
        for env in envs
    }

    def _collect(target):
        # Direct refs plus everything they transitively reference.
        direct = refs_by_name[target]
        result = set(direct)
        for ref in direct:
            result |= _collect(ref)
        return result

    return _collect(name)
constant[
Return set of recursive refs for given env name
>>> local_refs = sorted(recursive_refs([
... {'name': 'base', 'refs': []},
... {'name': 'test', 'refs': ['base']},
... {'name': 'local', 'refs': ['test']},
... ], 'local'))
>>> local_refs == ['base', 'test']
True
]
variable[refs_by_name] assign[=] <ast.DictComp object at 0x7da204620190>
variable[refs] assign[=] call[name[refs_by_name]][name[name]]
if name[refs] begin[:]
variable[indirect_refs] assign[=] call[name[set], parameter[call[name[itertools].chain.from_iterable, parameter[<ast.ListComp object at 0x7da204623c10>]]]]
return[call[name[set].union, parameter[name[refs], name[indirect_refs]]]] | keyword[def] identifier[recursive_refs] ( identifier[envs] , identifier[name] ):
literal[string]
identifier[refs_by_name] ={
identifier[env] [ literal[string] ]: identifier[set] ( identifier[env] [ literal[string] ])
keyword[for] identifier[env] keyword[in] identifier[envs]
}
identifier[refs] = identifier[refs_by_name] [ identifier[name] ]
keyword[if] identifier[refs] :
identifier[indirect_refs] = identifier[set] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ([
identifier[recursive_refs] ( identifier[envs] , identifier[ref] )
keyword[for] identifier[ref] keyword[in] identifier[refs]
]))
keyword[else] :
identifier[indirect_refs] = identifier[set] ()
keyword[return] identifier[set] . identifier[union] ( identifier[refs] , identifier[indirect_refs] ) | def recursive_refs(envs, name):
"""
Return set of recursive refs for given env name
>>> local_refs = sorted(recursive_refs([
... {'name': 'base', 'refs': []},
... {'name': 'test', 'refs': ['base']},
... {'name': 'local', 'refs': ['test']},
... ], 'local'))
>>> local_refs == ['base', 'test']
True
"""
refs_by_name = {env['name']: set(env['refs']) for env in envs}
refs = refs_by_name[name]
if refs:
indirect_refs = set(itertools.chain.from_iterable([recursive_refs(envs, ref) for ref in refs])) # depends on [control=['if'], data=[]]
else:
indirect_refs = set()
return set.union(refs, indirect_refs) |
def precmd(self, line):
    """
    Allow commands to have a last parameter of 'cookie=somevalue'
    TODO somevalue will be prepended onto any output lines so
    that editors can distinguish output from certain kinds
    of events they have sent.
    :param line:
    :return:
    """
    args = shlex.split(line or "")
    if args and 'cookie=' in args[-1]:
        cookie_index = line.index('cookie=')
        self.cookie = line[cookie_index + 7:]
        line = line[:cookie_index].strip()
        # BUGFIX: re-tokenize after stripping the cookie; previously the
        # stale args still contained the 'cookie=...' token, so the
        # shortcuts branch below re-appended it to the expanded command.
        args = shlex.split(line)
    if line.startswith('#'):
        # Comment lines are dropped entirely.
        return ''
    elif '=' in line:
        # allow somevar=somevalue
        # first check if we really mean a command
        cmdname = line.partition(" ")[0]
        if hasattr(self, "do_%s" % cmdname):
            return line
        if not line.startswith("set "):
            return "set " + line
        else:
            return line
    if args and args[0] in self.shortcuts:
        # Expand a registered shortcut into its full command.
        return "%s %s" % (self.shortcuts[args[0]], " ".join(args[1:]))
    else:
        return line
constant[
Allow commands to have a last parameter of 'cookie=somevalue'
TODO somevalue will be prepended onto any output lines so
that editors can distinguish output from certain kinds
of events they have sent.
:param line:
:return:
]
variable[args] assign[=] call[name[shlex].split, parameter[<ast.BoolOp object at 0x7da2041dae00>]]
if <ast.BoolOp object at 0x7da2041dbca0> begin[:]
variable[cookie_index] assign[=] call[name[line].index, parameter[constant[cookie=]]]
variable[cookie] assign[=] call[name[line]][<ast.Slice object at 0x7da2041d9480>]
variable[line] assign[=] call[call[name[line]][<ast.Slice object at 0x7da2041dae90>].strip, parameter[]]
name[self].cookie assign[=] name[cookie]
if call[name[line].startswith, parameter[constant[#]]] begin[:]
return[constant[]]
if <ast.BoolOp object at 0x7da2041dae30> begin[:]
return[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da2041d8970>, <ast.Call object at 0x7da2041dabf0>]]]] | keyword[def] identifier[precmd] ( identifier[self] , identifier[line] ):
literal[string]
identifier[args] = identifier[shlex] . identifier[split] ( identifier[line] keyword[or] literal[string] )
keyword[if] identifier[args] keyword[and] literal[string] keyword[in] identifier[args] [- literal[int] ]:
identifier[cookie_index] = identifier[line] . identifier[index] ( literal[string] )
identifier[cookie] = identifier[line] [ identifier[cookie_index] + literal[int] :]
identifier[line] = identifier[line] [: identifier[cookie_index] ]. identifier[strip] ()
identifier[self] . identifier[cookie] = identifier[cookie]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string]
keyword[elif] literal[string] keyword[in] identifier[line] :
identifier[cmdname] = identifier[line] . identifier[partition] ( literal[string] )[ literal[int] ]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] % identifier[cmdname] ):
keyword[return] identifier[line]
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[return] literal[string] + identifier[line]
keyword[else] :
keyword[return] identifier[line]
keyword[if] identifier[len] ( identifier[args] ) keyword[and] identifier[args] [ literal[int] ] keyword[in] identifier[self] . identifier[shortcuts] :
keyword[return] literal[string] %( identifier[self] . identifier[shortcuts] [ identifier[args] [ literal[int] ]], literal[string] . identifier[join] ( identifier[args] [ literal[int] :]))
keyword[else] :
keyword[return] identifier[line] | def precmd(self, line):
"""
Allow commands to have a last parameter of 'cookie=somevalue'
TODO somevalue will be prepended onto any output lines so
that editors can distinguish output from certain kinds
of events they have sent.
:param line:
:return:
"""
args = shlex.split(line or '')
if args and 'cookie=' in args[-1]:
cookie_index = line.index('cookie=')
cookie = line[cookie_index + 7:]
line = line[:cookie_index].strip()
self.cookie = cookie # depends on [control=['if'], data=[]]
if line.startswith('#'):
return '' # depends on [control=['if'], data=[]]
elif '=' in line:
# allow somevar=somevalue
# first check if we really mean a command
cmdname = line.partition(' ')[0]
if hasattr(self, 'do_%s' % cmdname):
return line # depends on [control=['if'], data=[]]
if not line.startswith('set '):
return 'set ' + line # depends on [control=['if'], data=[]]
else:
return line # depends on [control=['if'], data=['line']]
if len(args) and args[0] in self.shortcuts:
return '%s %s' % (self.shortcuts[args[0]], ' '.join(args[1:])) # depends on [control=['if'], data=[]]
else:
return line |
def stop_serve_forever(self):
    """Stop serve_forever_stoppable().

    Sets ``self.stop_request`` and, if the serving loop has not already
    stopped, sends a dummy HTTP ``SHUTDOWN`` request to our own address so
    the blocked request loop wakes up, sees the flag, and exits.
    """
    # serve_forever_stoppable() is what creates the stop_request attribute,
    # so its absence means the loop was never started.
    assert hasattr(
        self, "stop_request"
    ), "serve_forever_stoppable() must be called before"
    assert not self.stop_request, "stop_serve_forever() must only be called once"
    # # Flag stop request
    self.stop_request = True
    # Give the serving loop a moment to notice the flag on its own.
    time.sleep(0.1)
    if self.stopped:
        # _logger.info "stop_serve_forever() 'stopped'."
        # Loop already exited; no wake-up request needed.
        return
    # Add a do_SHUTDOWN method to to the ExtHandler class
    def _shutdownHandler(self):
        """Send 200 OK response, and set server.stop_request to True.
        http://code.activestate.com/recipes/336012/
        """
        # _logger.info "Handling do_SHUTDOWN request"
        self.send_response(200)
        self.end_headers()
        self.server.stop_request = True
    # Install the handler once; later calls on other instances reuse it.
    if not hasattr(ExtHandler, "do_SHUTDOWN"):
        ExtHandler.do_SHUTDOWN = _shutdownHandler
    # Send request, so socket is unblocked
    (host, port) = self.server_address
    # _logger.info "stop_serve_forever() sending {}:{}/ SHUTDOWN...".format(host, port)
    conn = http_client.HTTPConnection("{}:{}".format(host, port))
    conn.request("SHUTDOWN", "/")
    # _logger.info "stop_serve_forever() sent SHUTDOWN request, reading response..."
    conn.getresponse()
    # _logger.info "stop_serve_forever() received SHUTDOWN response."
    assert self.stop_request
constant[Stop serve_forever_stoppable().]
assert[call[name[hasattr], parameter[name[self], constant[stop_request]]]]
assert[<ast.UnaryOp object at 0x7da1b0121b70>]
name[self].stop_request assign[=] constant[True]
call[name[time].sleep, parameter[constant[0.1]]]
if name[self].stopped begin[:]
return[None]
def function[_shutdownHandler, parameter[self]]:
constant[Send 200 OK response, and set server.stop_request to True.
http://code.activestate.com/recipes/336012/
]
call[name[self].send_response, parameter[constant[200]]]
call[name[self].end_headers, parameter[]]
name[self].server.stop_request assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b0122350> begin[:]
name[ExtHandler].do_SHUTDOWN assign[=] name[_shutdownHandler]
<ast.Tuple object at 0x7da1b0121120> assign[=] name[self].server_address
variable[conn] assign[=] call[name[http_client].HTTPConnection, parameter[call[constant[{}:{}].format, parameter[name[host], name[port]]]]]
call[name[conn].request, parameter[constant[SHUTDOWN], constant[/]]]
call[name[conn].getresponse, parameter[]]
assert[name[self].stop_request] | keyword[def] identifier[stop_serve_forever] ( identifier[self] ):
literal[string]
keyword[assert] identifier[hasattr] (
identifier[self] , literal[string]
), literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[stop_request] , literal[string]
identifier[self] . identifier[stop_request] = keyword[True]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[if] identifier[self] . identifier[stopped] :
keyword[return]
keyword[def] identifier[_shutdownHandler] ( identifier[self] ):
literal[string]
identifier[self] . identifier[send_response] ( literal[int] )
identifier[self] . identifier[end_headers] ()
identifier[self] . identifier[server] . identifier[stop_request] = keyword[True]
keyword[if] keyword[not] identifier[hasattr] ( identifier[ExtHandler] , literal[string] ):
identifier[ExtHandler] . identifier[do_SHUTDOWN] = identifier[_shutdownHandler]
( identifier[host] , identifier[port] )= identifier[self] . identifier[server_address]
identifier[conn] = identifier[http_client] . identifier[HTTPConnection] ( literal[string] . identifier[format] ( identifier[host] , identifier[port] ))
identifier[conn] . identifier[request] ( literal[string] , literal[string] )
identifier[conn] . identifier[getresponse] ()
keyword[assert] identifier[self] . identifier[stop_request] | def stop_serve_forever(self):
"""Stop serve_forever_stoppable()."""
assert hasattr(self, 'stop_request'), 'serve_forever_stoppable() must be called before'
assert not self.stop_request, 'stop_serve_forever() must only be called once'
# # Flag stop request
self.stop_request = True
time.sleep(0.1)
if self.stopped:
# _logger.info "stop_serve_forever() 'stopped'."
return # depends on [control=['if'], data=[]]
# Add a do_SHUTDOWN method to to the ExtHandler class
def _shutdownHandler(self):
"""Send 200 OK response, and set server.stop_request to True.
http://code.activestate.com/recipes/336012/
"""
# _logger.info "Handling do_SHUTDOWN request"
self.send_response(200)
self.end_headers()
self.server.stop_request = True
if not hasattr(ExtHandler, 'do_SHUTDOWN'):
ExtHandler.do_SHUTDOWN = _shutdownHandler # depends on [control=['if'], data=[]]
# Send request, so socket is unblocked
(host, port) = self.server_address
# _logger.info "stop_serve_forever() sending {}:{}/ SHUTDOWN...".format(host, port)
conn = http_client.HTTPConnection('{}:{}'.format(host, port))
conn.request('SHUTDOWN', '/')
# _logger.info "stop_serve_forever() sent SHUTDOWN request, reading response..."
conn.getresponse()
# _logger.info "stop_serve_forever() received SHUTDOWN response."
assert self.stop_request |
def try_friends(self, others):
    """Try to befriend up to ``int(10 * openness)`` randomly ordered peers.

    NOTE: ``others`` is shuffled in place, so the caller's list order changes.
    Returns True when at least one new friendship was established.
    """
    made_friend = False
    limit = int(10 * self['openness'])
    shuffle(others)
    for candidate in islice(others, limit):  # random.choice >= 3.7
        if candidate == self:
            continue  # never try to befriend myself
        if not candidate.befriend(self):
            self.debug('{} does not want to be friends'.format(candidate.id))
            continue
        # Mutual acceptance: force the reciprocal link on my side.
        self.befriend(candidate, force=True)
        self.debug('Hooray! new friend: {}'.format(candidate.id))
        made_friend = True
    return made_friend
return befriended | def function[try_friends, parameter[self, others]]:
constant[ Look for random agents around me and try to befriend them]
variable[befriended] assign[=] constant[False]
variable[k] assign[=] call[name[int], parameter[binary_operation[constant[10] * call[name[self]][constant[openness]]]]]
call[name[shuffle], parameter[name[others]]]
for taget[name[friend]] in starred[call[name[islice], parameter[name[others], name[k]]]] begin[:]
if compare[name[friend] equal[==] name[self]] begin[:]
continue
if call[name[friend].befriend, parameter[name[self]]] begin[:]
call[name[self].befriend, parameter[name[friend]]]
call[name[self].debug, parameter[call[constant[Hooray! new friend: {}].format, parameter[name[friend].id]]]]
variable[befriended] assign[=] constant[True]
return[name[befriended]] | keyword[def] identifier[try_friends] ( identifier[self] , identifier[others] ):
literal[string]
identifier[befriended] = keyword[False]
identifier[k] = identifier[int] ( literal[int] * identifier[self] [ literal[string] ])
identifier[shuffle] ( identifier[others] )
keyword[for] identifier[friend] keyword[in] identifier[islice] ( identifier[others] , identifier[k] ):
keyword[if] identifier[friend] == identifier[self] :
keyword[continue]
keyword[if] identifier[friend] . identifier[befriend] ( identifier[self] ):
identifier[self] . identifier[befriend] ( identifier[friend] , identifier[force] = keyword[True] )
identifier[self] . identifier[debug] ( literal[string] . identifier[format] ( identifier[friend] . identifier[id] ))
identifier[befriended] = keyword[True]
keyword[else] :
identifier[self] . identifier[debug] ( literal[string] . identifier[format] ( identifier[friend] . identifier[id] ))
keyword[return] identifier[befriended] | def try_friends(self, others):
""" Look for random agents around me and try to befriend them"""
befriended = False
k = int(10 * self['openness'])
shuffle(others)
for friend in islice(others, k): # random.choice >= 3.7
if friend == self:
continue # depends on [control=['if'], data=[]]
if friend.befriend(self):
self.befriend(friend, force=True)
self.debug('Hooray! new friend: {}'.format(friend.id))
befriended = True # depends on [control=['if'], data=[]]
else:
self.debug('{} does not want to be friends'.format(friend.id)) # depends on [control=['for'], data=['friend']]
return befriended |
def prepare_csv_to_dataframe(data_file, config, use_target=True):
    """
    Parses the given data file following the data model of the given configuration.
    @param data_file: path or file-like object accepted by ``pd.read_csv``
    @param config: configuration object exposing ``get_data_model()``
    @param use_target: when False, target features are excluded from the columns
    @return: pandas DataFrame
    """
    # Fixed: the original also built a `dtypes` list that was never used.
    names = []
    model = config.get_data_model()
    for feature in model:
        # NOTE: assert is stripped under `python -O`; kept to preserve the
        # existing AssertionError contract for duplicate feature names.
        assert feature.get_name() not in names, "Two features can't have the same name."
        if not use_target and feature.is_target():
            continue
        names.append(feature.get_name())
    data = pd.read_csv(data_file, names=names)
    transform_categorical_features(config, data, use_target)
    return data
return data | def function[prepare_csv_to_dataframe, parameter[data_file, config, use_target]]:
constant[
Parses the given data file following the data model of the given configuration.
@return: pandas DataFrame
]
<ast.Tuple object at 0x7da207f01780> assign[=] tuple[[<ast.List object at 0x7da207f00f40>, <ast.List object at 0x7da207f02110>]]
variable[model] assign[=] call[name[config].get_data_model, parameter[]]
for taget[name[feature]] in starred[name[model]] begin[:]
assert[compare[call[name[feature].get_name, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[names]]]
if <ast.BoolOp object at 0x7da18eb56260> begin[:]
continue
call[name[names].append, parameter[call[name[feature].get_name, parameter[]]]]
variable[data] assign[=] call[name[pd].read_csv, parameter[name[data_file]]]
call[name[transform_categorical_features], parameter[name[config], name[data], name[use_target]]]
return[name[data]] | keyword[def] identifier[prepare_csv_to_dataframe] ( identifier[data_file] , identifier[config] , identifier[use_target] = keyword[True] ):
literal[string]
identifier[names] , identifier[dtypes] =[],[]
identifier[model] = identifier[config] . identifier[get_data_model] ()
keyword[for] identifier[feature] keyword[in] identifier[model] :
keyword[assert] identifier[feature] . identifier[get_name] () keyword[not] keyword[in] identifier[names] , literal[string]
keyword[if] keyword[not] identifier[use_target] keyword[and] identifier[feature] . identifier[is_target] ():
keyword[continue]
identifier[names] . identifier[append] ( identifier[feature] . identifier[get_name] ())
identifier[data] = identifier[pd] . identifier[read_csv] ( identifier[data_file] , identifier[names] = identifier[names] )
identifier[transform_categorical_features] ( identifier[config] , identifier[data] , identifier[use_target] )
keyword[return] identifier[data] | def prepare_csv_to_dataframe(data_file, config, use_target=True):
"""
Parses the given data file following the data model of the given configuration.
@return: pandas DataFrame
"""
(names, dtypes) = ([], [])
model = config.get_data_model()
for feature in model:
assert feature.get_name() not in names, "Two features can't have the same name."
if not use_target and feature.is_target():
continue # depends on [control=['if'], data=[]]
names.append(feature.get_name()) # depends on [control=['for'], data=['feature']]
data = pd.read_csv(data_file, names=names)
transform_categorical_features(config, data, use_target)
return data |
def usable_class_name(node):
    """Return ``node.qname()`` with builtin-module prefixes stripped.

    Each of ``__builtin__.``, ``builtins.`` and a leading ``.`` is removed
    at most once, in that order, to produce a readable class name.
    """
    stripped = node.qname()
    for unwanted in ("__builtin__.", "builtins.", "."):
        if stripped.startswith(unwanted):
            stripped = stripped[len(unwanted):]
    return stripped
constant[Make a reasonable class name for a class node.]
variable[name] assign[=] call[name[node].qname, parameter[]]
for taget[name[prefix]] in starred[list[[<ast.Constant object at 0x7da1b26acc70>, <ast.Constant object at 0x7da1b26ace80>, <ast.Constant object at 0x7da1b26ada80>]]] begin[:]
if call[name[name].startswith, parameter[name[prefix]]] begin[:]
variable[name] assign[=] call[name[name]][<ast.Slice object at 0x7da1b26aece0>]
return[name[name]] | keyword[def] identifier[usable_class_name] ( identifier[node] ):
literal[string]
identifier[name] = identifier[node] . identifier[qname] ()
keyword[for] identifier[prefix] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[name] . identifier[startswith] ( identifier[prefix] ):
identifier[name] = identifier[name] [ identifier[len] ( identifier[prefix] ):]
keyword[return] identifier[name] | def usable_class_name(node):
"""Make a reasonable class name for a class node."""
name = node.qname()
for prefix in ['__builtin__.', 'builtins.', '.']:
if name.startswith(prefix):
name = name[len(prefix):] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prefix']]
return name |
def record_timestamp(pathname_full):
    """Record the timestamp of running in a dotfile.

    Touches a marker file (``RECORD_FILENAME``) inside ``pathname_full`` and
    deletes previously recorded marker files found below that path.  Does
    nothing in test / list-only mode, when timestamp recording is disabled,
    when the path is a symlink while symlinks are not followed, or when the
    path is not a directory.

    NOTE(review): relies on the module-level ``Settings``, ``RECORD_FILENAME``
    and ``OLD_TIMESTAMPS`` globals; ``OLD_TIMESTAMPS`` is presumably a list of
    marker-file paths collected earlier -- confirm it is populated before
    this runs.
    """
    # Dry runs and disabled recording must never touch the filesystem.
    if Settings.test or Settings.list_only or not Settings.record_timestamp:
        return
    if not Settings.follow_symlinks and os.path.islink(pathname_full):
        if Settings.verbose:
            print('Not setting timestamp because not following symlinks')
        return
    if not os.path.isdir(pathname_full):
        if Settings.verbose:
            print('Not setting timestamp for a non-directory')
        return
    record_filename_full = os.path.join(pathname_full, RECORD_FILENAME)
    try:
        # Opening with 'w' creates (or truncates) the marker file; utime then
        # refreshes its timestamp to "now" while the handle is still open.
        with open(record_filename_full, 'w'):
            os.utime(record_filename_full, None)
        if Settings.verbose:
            print("Set timestamp: {}".format(record_filename_full))
        for fname in OLD_TIMESTAMPS:
            if fname.startswith(pathname_full) and \
                    fname != record_filename_full:
                # only remove timestamps below the current path
                # but don't remove the timestamp we just set!
                os.remove(fname)
                if Settings.verbose:
                    print('Removed old timestamp: {}'.format(fname))
    except IOError:
        # Best effort: report the failure instead of aborting the run.
        print("Could not set timestamp in {}".format(pathname_full))
constant[Record the timestamp of running in a dotfile.]
if <ast.BoolOp object at 0x7da204963580> begin[:]
return[None]
if <ast.BoolOp object at 0x7da204961600> begin[:]
if name[Settings].verbose begin[:]
call[name[print], parameter[constant[Not setting timestamp because not following symlinks]]]
return[None]
if <ast.UnaryOp object at 0x7da2049620e0> begin[:]
if name[Settings].verbose begin[:]
call[name[print], parameter[constant[Not setting timestamp for a non-directory]]]
return[None]
variable[record_filename_full] assign[=] call[name[os].path.join, parameter[name[pathname_full], name[RECORD_FILENAME]]]
<ast.Try object at 0x7da2044c3460> | keyword[def] identifier[record_timestamp] ( identifier[pathname_full] ):
literal[string]
keyword[if] identifier[Settings] . identifier[test] keyword[or] identifier[Settings] . identifier[list_only] keyword[or] keyword[not] identifier[Settings] . identifier[record_timestamp] :
keyword[return]
keyword[if] keyword[not] identifier[Settings] . identifier[follow_symlinks] keyword[and] identifier[os] . identifier[path] . identifier[islink] ( identifier[pathname_full] ):
keyword[if] identifier[Settings] . identifier[verbose] :
identifier[print] ( literal[string] )
keyword[return]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[pathname_full] ):
keyword[if] identifier[Settings] . identifier[verbose] :
identifier[print] ( literal[string] )
keyword[return]
identifier[record_filename_full] = identifier[os] . identifier[path] . identifier[join] ( identifier[pathname_full] , identifier[RECORD_FILENAME] )
keyword[try] :
keyword[with] identifier[open] ( identifier[record_filename_full] , literal[string] ):
identifier[os] . identifier[utime] ( identifier[record_filename_full] , keyword[None] )
keyword[if] identifier[Settings] . identifier[verbose] :
identifier[print] ( literal[string] . identifier[format] ( identifier[record_filename_full] ))
keyword[for] identifier[fname] keyword[in] identifier[OLD_TIMESTAMPS] :
keyword[if] identifier[fname] . identifier[startswith] ( identifier[pathname_full] ) keyword[and] identifier[fname] != identifier[record_filename_full] :
identifier[os] . identifier[remove] ( identifier[fname] )
keyword[if] identifier[Settings] . identifier[verbose] :
identifier[print] ( literal[string] . identifier[format] ( identifier[fname] ))
keyword[except] identifier[IOError] :
identifier[print] ( literal[string] . identifier[format] ( identifier[pathname_full] )) | def record_timestamp(pathname_full):
"""Record the timestamp of running in a dotfile."""
if Settings.test or Settings.list_only or (not Settings.record_timestamp):
return # depends on [control=['if'], data=[]]
if not Settings.follow_symlinks and os.path.islink(pathname_full):
if Settings.verbose:
print('Not setting timestamp because not following symlinks') # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
if not os.path.isdir(pathname_full):
if Settings.verbose:
print('Not setting timestamp for a non-directory') # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
record_filename_full = os.path.join(pathname_full, RECORD_FILENAME)
try:
with open(record_filename_full, 'w'):
os.utime(record_filename_full, None) # depends on [control=['with'], data=[]]
if Settings.verbose:
print('Set timestamp: {}'.format(record_filename_full)) # depends on [control=['if'], data=[]]
for fname in OLD_TIMESTAMPS:
if fname.startswith(pathname_full) and fname != record_filename_full:
# only remove timestamps below the curent path
# but don't remove the timestamp we just set!
os.remove(fname)
if Settings.verbose:
print('Removed old timestamp: {}'.format(fname)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fname']] # depends on [control=['try'], data=[]]
except IOError:
print('Could not set timestamp in {}'.format(pathname_full)) # depends on [control=['except'], data=[]] |
def _is_impossible_by_count(self, state):
    """Disallow any board that has insufficient tile count to solve."""
    WILDCARD = '2'  # every numeric tile type is tallied under this one key
    tallies = dict.fromkeys(base.Tile._all_types, 0)
    for _position, tile in state.board.positions_with_tile():
        kind = tile._type
        try:
            int(kind)
        except ValueError:
            tallies[kind] += 1      # non-numeric type: count under its own key
        else:
            tallies[WILDCARD] += 1  # numeric type: some flavor of wildcard
    # Name the tallies for readability.
    skullbomb = tallies['*']
    skull = tallies['s']
    wildcard = tallies[WILDCARD]
    color_counts = (tallies['r'], tallies['g'], tallies['b'], tallies['y'])
    exp = tallies['x']
    money = tallies['m']
    # A skullbomb is always matchable when enough skull-like tiles exist.
    if skullbomb and skullbomb + skull >= 3:
        return False
    # A wildcard is always matchable when enough of any single color exists.
    if wildcard and any(wildcard + count >= 3 for count in color_counts):
        return False
    # No special case applied: any tile type present in fewer than 3 copies
    # can never be cleared, so the board is impossible.
    if any(count and count < 3
           for count in color_counts + (exp, money, skull)):
        return True
    # Counts look sufficient; allow the state.
    return False
constant[Disallow any board that has insufficient tile count to solve.]
variable[counts] assign[=] <ast.DictComp object at 0x7da1b25d6440>
variable[standard_wildcard_type] assign[=] constant[2]
for taget[tuple[[<ast.Name object at 0x7da1b25d5360>, <ast.Name object at 0x7da1b25d4d90>]]] in starred[call[name[state].board.positions_with_tile, parameter[]]] begin[:]
variable[tile_type] assign[=] name[tile]._type
<ast.Try object at 0x7da1b25d6770>
variable[skullbomb] assign[=] call[name[counts]][constant[*]]
variable[skull] assign[=] call[name[counts]][constant[s]]
variable[wildcard] assign[=] call[name[counts]][name[standard_wildcard_type]]
variable[red] assign[=] call[name[counts]][constant[r]]
variable[green] assign[=] call[name[counts]][constant[g]]
variable[blue] assign[=] call[name[counts]][constant[b]]
variable[yellow] assign[=] call[name[counts]][constant[y]]
variable[exp] assign[=] call[name[counts]][constant[x]]
variable[money] assign[=] call[name[counts]][constant[m]]
if <ast.BoolOp object at 0x7da1b25d5510> begin[:]
return[constant[False]]
if name[wildcard] begin[:]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b25d5f30>]] begin[:]
return[constant[False]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b25d7340>]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_is_impossible_by_count] ( identifier[self] , identifier[state] ):
literal[string]
identifier[counts] ={ identifier[tile_type] : literal[int] keyword[for] identifier[tile_type] keyword[in] identifier[base] . identifier[Tile] . identifier[_all_types] }
identifier[standard_wildcard_type] = literal[string]
keyword[for] identifier[p] , identifier[tile] keyword[in] identifier[state] . identifier[board] . identifier[positions_with_tile] ():
identifier[tile_type] = identifier[tile] . identifier[_type]
keyword[try] :
identifier[int] ( identifier[tile_type] )
identifier[counts] [ identifier[standard_wildcard_type] ]+= literal[int]
keyword[except] identifier[ValueError] :
identifier[counts] [ identifier[tile_type] ]+= literal[int]
identifier[skullbomb] = identifier[counts] [ literal[string] ]
identifier[skull] = identifier[counts] [ literal[string] ]
identifier[wildcard] = identifier[counts] [ identifier[standard_wildcard_type] ]
identifier[red] = identifier[counts] [ literal[string] ]
identifier[green] = identifier[counts] [ literal[string] ]
identifier[blue] = identifier[counts] [ literal[string] ]
identifier[yellow] = identifier[counts] [ literal[string] ]
identifier[exp] = identifier[counts] [ literal[string] ]
identifier[money] = identifier[counts] [ literal[string] ]
keyword[if] identifier[skullbomb] keyword[and] identifier[skullbomb] + identifier[skull] >= literal[int] :
keyword[return] keyword[False]
keyword[if] identifier[wildcard] :
keyword[if] identifier[any] ( identifier[wildcard] + identifier[color] >= literal[int]
keyword[for] identifier[color] keyword[in] ( identifier[red] , identifier[green] , identifier[blue] , identifier[yellow] )):
keyword[return] keyword[False]
keyword[if] identifier[any] ( identifier[tile] keyword[and] identifier[tile] < literal[int] keyword[for] identifier[tile] keyword[in] ( identifier[red] , identifier[green] , identifier[blue] , identifier[yellow] ,
identifier[exp] , identifier[money] , identifier[skull] )):
keyword[return] keyword[True]
keyword[return] keyword[False] | def _is_impossible_by_count(self, state):
"""Disallow any board that has insufficient tile count to solve."""
# count all the tile types and name them for readability
counts = {tile_type: 0 for tile_type in base.Tile._all_types}
standard_wildcard_type = '2'
for (p, tile) in state.board.positions_with_tile():
# count all wildcards as one value
tile_type = tile._type
try:
int(tile_type)
counts[standard_wildcard_type] += 1 # depends on [control=['try'], data=[]]
except ValueError:
counts[tile_type] += 1 # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
skullbomb = counts['*']
skull = counts['s']
wildcard = counts[standard_wildcard_type]
red = counts['r']
green = counts['g']
blue = counts['b']
yellow = counts['y']
exp = counts['x']
money = counts['m']
# always allow skullbomb with enough skulls
if skullbomb and skullbomb + skull >= 3:
return False # depends on [control=['if'], data=[]]
# always allow wildcard with enough of one color
if wildcard:
if any((wildcard + color >= 3 for color in (red, green, blue, yellow))):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# disallow simple cases since special cases didn't occur
if any((tile and tile < 3 for tile in (red, green, blue, yellow, exp, money, skull))):
return True # depends on [control=['if'], data=[]]
# allow the state if counts seem ok
return False |
def sorensen(seq1, seq2):
"""Compute the Sorensen distance between the two sequences `seq1` and `seq2`.
They should contain hashable items.
The return value is a float between 0 and 1, where 0 means equal, and 1 totally different.
"""
set1, set2 = set(seq1), set(seq2)
return 1 - (2 * len(set1 & set2) / float(len(set1) + len(set2))) | def function[sorensen, parameter[seq1, seq2]]:
constant[Compute the Sorensen distance between the two sequences `seq1` and `seq2`.
They should contain hashable items.
The return value is a float between 0 and 1, where 0 means equal, and 1 totally different.
]
<ast.Tuple object at 0x7da2054a7610> assign[=] tuple[[<ast.Call object at 0x7da2054a6ad0>, <ast.Call object at 0x7da2054a7910>]]
return[binary_operation[constant[1] - binary_operation[binary_operation[constant[2] * call[name[len], parameter[binary_operation[name[set1] <ast.BitAnd object at 0x7da2590d6b60> name[set2]]]]] / call[name[float], parameter[binary_operation[call[name[len], parameter[name[set1]]] + call[name[len], parameter[name[set2]]]]]]]]] | keyword[def] identifier[sorensen] ( identifier[seq1] , identifier[seq2] ):
literal[string]
identifier[set1] , identifier[set2] = identifier[set] ( identifier[seq1] ), identifier[set] ( identifier[seq2] )
keyword[return] literal[int] -( literal[int] * identifier[len] ( identifier[set1] & identifier[set2] )/ identifier[float] ( identifier[len] ( identifier[set1] )+ identifier[len] ( identifier[set2] ))) | def sorensen(seq1, seq2):
"""Compute the Sorensen distance between the two sequences `seq1` and `seq2`.
They should contain hashable items.
The return value is a float between 0 and 1, where 0 means equal, and 1 totally different.
"""
(set1, set2) = (set(seq1), set(seq2))
return 1 - 2 * len(set1 & set2) / float(len(set1) + len(set2)) |
def getbyuuid(self, uuid):
    """Get a schema by given uuid.
    :param str uuid: schema uuid to retrieve.
    :rtype: Schema
    :raises KeyError: if ``uuid`` has not been registered.
    """
    registered = self._schbyuuid
    if uuid in registered:
        return registered[uuid]
    raise KeyError('uuid {0} not registered'.format(uuid))
constant[Get a schema by given uuid.
:param str uuid: schema uuid to retrieve.
:rtype: Schema
:raises: KeyError if uuid is not registered already.
]
if compare[name[uuid] <ast.NotIn object at 0x7da2590d7190> name[self]._schbyuuid] begin[:]
<ast.Raise object at 0x7da1b14d7940>
return[call[name[self]._schbyuuid][name[uuid]]] | keyword[def] identifier[getbyuuid] ( identifier[self] , identifier[uuid] ):
literal[string]
keyword[if] identifier[uuid] keyword[not] keyword[in] identifier[self] . identifier[_schbyuuid] :
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[uuid] ))
keyword[return] identifier[self] . identifier[_schbyuuid] [ identifier[uuid] ] | def getbyuuid(self, uuid):
"""Get a schema by given uuid.
:param str uuid: schema uuid to retrieve.
:rtype: Schema
:raises: KeyError if uuid is not registered already.
"""
if uuid not in self._schbyuuid:
raise KeyError('uuid {0} not registered'.format(uuid)) # depends on [control=['if'], data=['uuid']]
return self._schbyuuid[uuid] |
def get_connected_endpoints(self, **kwargs):  # noqa: E501
    """(DEPRECATED) List registered endpoints; at most 200 are returned.  # noqa: E501

    Endpoints are physical devices with a valid Device Management
    registration.  This endpoint is deprecated (removal planned 1Q/18);
    prefer the Device Directory API ``/v3/devices/`` and use
    ``?filter=state%3Dregistered`` to list only registered devices.

    This method makes a synchronous HTTP request by default.  To make an
    asynchronous HTTP request, pass ``asynchronous=True``:

    >>> thread = api.get_connected_endpoints(asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str type: Filter endpoints by endpoint-type.
    :return: list[Endpoint], or the request thread when asynchronous.
    """
    # Always request the payload only.  Both the synchronous and the
    # asynchronous path delegate to the same helper, which returns either
    # the data or the request thread depending on `asynchronous`.
    kwargs['_return_http_data_only'] = True
    return self.get_connected_endpoints_with_http_info(**kwargs)  # noqa: E501
constant[(DEPRECATED) List registered endpoints. The number of returned endpoints is currently limited to 200. # noqa: E501
Endpoints are physical devices having valid registration to Device Management. All devices regardless of registration status can be requested from Device Directory API ['/v3/devices/`](/docs/current/service-api-references/device-directory.html). **Note:** This endpoint is deprecated and will be removed 1Q/18. You should use the Device Directory API [`/v3/devices/`](/docs/current/service-api-references/device-directory.html). To list only the registered devices, use filter `/v3/devices/?filter=state%3Dregistered`. **Example usage:** curl -X GET https://api.us-east-1.mbedcloud.com/v2/endpoints -H 'authorization: Bearer {api-key}' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.get_connected_endpoints(asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str type: Filter endpoints by endpoint-type.
:return: list[Endpoint]
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].get_connected_endpoints_with_http_info, parameter[]]] | keyword[def] identifier[get_connected_endpoints] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get_connected_endpoints_with_http_info] (** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[get_connected_endpoints_with_http_info] (** identifier[kwargs] )
keyword[return] identifier[data] | def get_connected_endpoints(self, **kwargs): # noqa: E501
"(DEPRECATED) List registered endpoints. The number of returned endpoints is currently limited to 200. # noqa: E501\n\n Endpoints are physical devices having valid registration to Device Management. All devices regardless of registration status can be requested from Device Directory API ['/v3/devices/`](/docs/current/service-api-references/device-directory.html). **Note:** This endpoint is deprecated and will be removed 1Q/18. You should use the Device Directory API [`/v3/devices/`](/docs/current/service-api-references/device-directory.html). To list only the registered devices, use filter `/v3/devices/?filter=state%3Dregistered`. **Example usage:** curl -X GET https://api.us-east-1.mbedcloud.com/v2/endpoints -H 'authorization: Bearer {api-key}' # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.get_connected_endpoints(asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str type: Filter endpoints by endpoint-type.\n :return: list[Endpoint]\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.get_connected_endpoints_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.get_connected_endpoints_with_http_info(**kwargs) # noqa: E501
return data |
def DampedPowerlaw(q, a, alpha, sigma):
    """Damped power-law

    Inputs:
    -------
        ``q``: independent variable
        ``a``: factor
        ``alpha``: exponent
        ``sigma``: hwhm of the damping Gaussian

    Formula:
    --------
        ``a*q^alpha*exp(-q^2/(2*sigma^2))``
    """
    # Split the formula into its two named factors for readability.
    powerlaw = a * q ** alpha
    gaussian_damping = np.exp(-q ** 2 / (2 * sigma ** 2))
    return powerlaw * gaussian_damping
constant[Damped power-law
Inputs:
-------
``q``: independent variable
``a``: factor
``alpha``: exponent
``sigma``: hwhm of the damping Gaussian
Formula:
--------
``a*q^alpha*exp(-q^2/(2*sigma^2))``
]
return[binary_operation[binary_operation[name[a] * binary_operation[name[q] ** name[alpha]]] * call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b1051cc0> / binary_operation[constant[2] * binary_operation[name[sigma] ** constant[2]]]]]]]] | keyword[def] identifier[DampedPowerlaw] ( identifier[q] , identifier[a] , identifier[alpha] , identifier[sigma] ):
literal[string]
keyword[return] identifier[a] * identifier[q] ** identifier[alpha] * identifier[np] . identifier[exp] (- identifier[q] ** literal[int] /( literal[int] * identifier[sigma] ** literal[int] )) | def DampedPowerlaw(q, a, alpha, sigma):
"""Damped power-law
Inputs:
-------
``q``: independent variable
``a``: factor
``alpha``: exponent
``sigma``: hwhm of the damping Gaussian
Formula:
--------
``a*q^alpha*exp(-q^2/(2*sigma^2))``
"""
return a * q ** alpha * np.exp(-q ** 2 / (2 * sigma ** 2)) |
def state_dict(self) -> Dict[str, Any]:
    """
    A ``Trainer`` can use this to serialize the state of the metric tracker.
    """
    # Keyword form keeps each serialized field on its own line.
    return dict(
        best_so_far=self._best_so_far,
        patience=self._patience,
        epochs_with_no_improvement=self._epochs_with_no_improvement,
        is_best_so_far=self._is_best_so_far,
        should_decrease=self._should_decrease,
        best_epoch_metrics=self.best_epoch_metrics,
        epoch_number=self._epoch_number,
        best_epoch=self.best_epoch,
    )
} | def function[state_dict, parameter[self]]:
constant[
A ``Trainer`` can use this to serialize the state of the metric tracker.
]
return[dictionary[[<ast.Constant object at 0x7da1b1fff040>, <ast.Constant object at 0x7da1b1ffc910>, <ast.Constant object at 0x7da1b1ffd750>, <ast.Constant object at 0x7da1b1ffd060>, <ast.Constant object at 0x7da1b1fffee0>, <ast.Constant object at 0x7da1b1fffd60>, <ast.Constant object at 0x7da1b1ffff70>, <ast.Constant object at 0x7da1b1ffc430>], [<ast.Attribute object at 0x7da1b1ffe0b0>, <ast.Attribute object at 0x7da1b1ffc7c0>, <ast.Attribute object at 0x7da1b1ffe560>, <ast.Attribute object at 0x7da1b1ffd3f0>, <ast.Attribute object at 0x7da1b1fff9a0>, <ast.Attribute object at 0x7da1b1ffd360>, <ast.Attribute object at 0x7da1b1ffcdc0>, <ast.Attribute object at 0x7da1b1ffdea0>]]] | keyword[def] identifier[state_dict] ( identifier[self] )-> identifier[Dict] [ identifier[str] , identifier[Any] ]:
literal[string]
keyword[return] {
literal[string] : identifier[self] . identifier[_best_so_far] ,
literal[string] : identifier[self] . identifier[_patience] ,
literal[string] : identifier[self] . identifier[_epochs_with_no_improvement] ,
literal[string] : identifier[self] . identifier[_is_best_so_far] ,
literal[string] : identifier[self] . identifier[_should_decrease] ,
literal[string] : identifier[self] . identifier[best_epoch_metrics] ,
literal[string] : identifier[self] . identifier[_epoch_number] ,
literal[string] : identifier[self] . identifier[best_epoch]
} | def state_dict(self) -> Dict[str, Any]:
"""
A ``Trainer`` can use this to serialize the state of the metric tracker.
"""
return {'best_so_far': self._best_so_far, 'patience': self._patience, 'epochs_with_no_improvement': self._epochs_with_no_improvement, 'is_best_so_far': self._is_best_so_far, 'should_decrease': self._should_decrease, 'best_epoch_metrics': self.best_epoch_metrics, 'epoch_number': self._epoch_number, 'best_epoch': self.best_epoch} |
def push_app(self, content, content_url=None):
    """Push a notification to a Pushed application.

    :param content: text of the Pushed notification message.
    :param content_url: optional URL used to enrich the message.
    :return: Shipment ID as string.
    """
    # Authenticate with the application credentials held on this client.
    credentials = {
        'app_key': self.app_key,
        'app_secret': self.app_secret,
    }
    return self._push(content, 'app', credentials, content_url)
constant[Push a notification to a Pushed application.
Param: content -> content of Pushed notification message
content_url (optional) -> enrich message with URL
Returns Shipment ID as string
]
variable[parameters] assign[=] dictionary[[<ast.Constant object at 0x7da1b0838b20>, <ast.Constant object at 0x7da1b083ab90>], [<ast.Attribute object at 0x7da1b0838790>, <ast.Attribute object at 0x7da1b083bfa0>]]
return[call[name[self]._push, parameter[name[content], constant[app], name[parameters], name[content_url]]]] | keyword[def] identifier[push_app] ( identifier[self] , identifier[content] , identifier[content_url] = keyword[None] ):
literal[string]
identifier[parameters] ={
literal[string] : identifier[self] . identifier[app_key] ,
literal[string] : identifier[self] . identifier[app_secret]
}
keyword[return] identifier[self] . identifier[_push] ( identifier[content] , literal[string] , identifier[parameters] , identifier[content_url] ) | def push_app(self, content, content_url=None):
"""Push a notification to a Pushed application.
Param: content -> content of Pushed notification message
content_url (optional) -> enrich message with URL
Returns Shipment ID as string
"""
parameters = {'app_key': self.app_key, 'app_secret': self.app_secret}
return self._push(content, 'app', parameters, content_url) |
def to_table(self):
    """
    Convert an ArrayWrapper with shape (D1, ..., DN) and attributes
    T1, ..., TN, which are lists of tags of lengths D1, ..., DN, into
    a table with rows (tag1, ..., tagN, value) of maximum length
    D1 * ... * DN. Zero values are discarded.
    >>> from pprint import pprint
    >>> dic = dict(tagnames=['taxonomy', 'occupancy'],
    ...            taxonomy=['?', 'RC', 'WOOD'],
    ...            occupancy=['?', 'RES', 'IND', 'COM'])
    >>> arr = numpy.zeros((2, 3))
    >>> arr[0, 0] = 2000
    >>> arr[0, 1] = 5000
    >>> arr[1, 0] = 500
    >>> pprint(ArrayWrapper(arr, dic).to_table())
    [['taxonomy', 'occupancy', 'value'],
     ['RC', 'RES', 2000.0],
     ['RC', 'IND', 5000.0],
     ['WOOD', 'RES', 500.0]]
    """
    ndims = len(self.shape)
    # the tagnames are stored as bytestrings, so decode them first
    tags = decode_array(self.tagnames)
    ntags = len(tags)
    if ndims == ntags:
        header = [tags + ['value']]
        return header + self._to_table()
    if ndims == ntags + 1:  # one extra field beyond the tagnames
        header = [tags + [self.extra[0], 'value']]
        return header + self._to_table(self.extra[1:])
    raise TypeError(
        'There are %d dimensions but only %d tagnames' % (ndims, ntags))
constant[
Convert an ArrayWrapper with shape (D1, ..., DN) and attributes
T1, ..., TN which are list of tags of lenghts D1, ... DN into
a table with rows (tag1, ... tagN, value) of maximum length
D1 * ... * DN. Zero values are discarded.
>>> from pprint import pprint
>>> dic = dict(tagnames=['taxonomy', 'occupancy'],
... taxonomy=['?', 'RC', 'WOOD'],
... occupancy=['?', 'RES', 'IND', 'COM'])
>>> arr = numpy.zeros((2, 3))
>>> arr[0, 0] = 2000
>>> arr[0, 1] = 5000
>>> arr[1, 0] = 500
>>> pprint(ArrayWrapper(arr, dic).to_table())
[['taxonomy', 'occupancy', 'value'],
['RC', 'RES', 2000.0],
['RC', 'IND', 5000.0],
['WOOD', 'RES', 500.0]]
]
variable[shape] assign[=] name[self].shape
variable[tagnames] assign[=] call[name[decode_array], parameter[name[self].tagnames]]
if compare[call[name[len], parameter[name[shape]]] equal[==] call[name[len], parameter[name[tagnames]]]] begin[:]
return[binary_operation[list[[<ast.BinOp object at 0x7da1b26adae0>]] + call[name[self]._to_table, parameter[]]]] | keyword[def] identifier[to_table] ( identifier[self] ):
literal[string]
identifier[shape] = identifier[self] . identifier[shape]
identifier[tagnames] = identifier[decode_array] ( identifier[self] . identifier[tagnames] )
keyword[if] identifier[len] ( identifier[shape] )== identifier[len] ( identifier[tagnames] ):
keyword[return] [ identifier[tagnames] +[ literal[string] ]]+ identifier[self] . identifier[_to_table] ()
keyword[elif] identifier[len] ( identifier[shape] )== identifier[len] ( identifier[tagnames] )+ literal[int] :
identifier[tbl] =[ identifier[tagnames] +[ identifier[self] . identifier[extra] [ literal[int] ], literal[string] ]]
keyword[return] identifier[tbl] + identifier[self] . identifier[_to_table] ( identifier[self] . identifier[extra] [ literal[int] :])
keyword[else] :
keyword[raise] identifier[TypeError] (
literal[string] %
( identifier[len] ( identifier[shape] ), identifier[len] ( identifier[tagnames] ))) | def to_table(self):
"""
Convert an ArrayWrapper with shape (D1, ..., DN) and attributes
T1, ..., TN which are list of tags of lenghts D1, ... DN into
a table with rows (tag1, ... tagN, value) of maximum length
D1 * ... * DN. Zero values are discarded.
>>> from pprint import pprint
>>> dic = dict(tagnames=['taxonomy', 'occupancy'],
... taxonomy=['?', 'RC', 'WOOD'],
... occupancy=['?', 'RES', 'IND', 'COM'])
>>> arr = numpy.zeros((2, 3))
>>> arr[0, 0] = 2000
>>> arr[0, 1] = 5000
>>> arr[1, 0] = 500
>>> pprint(ArrayWrapper(arr, dic).to_table())
[['taxonomy', 'occupancy', 'value'],
['RC', 'RES', 2000.0],
['RC', 'IND', 5000.0],
['WOOD', 'RES', 500.0]]
"""
shape = self.shape
# the tagnames are bytestrings so they must be decoded
tagnames = decode_array(self.tagnames)
if len(shape) == len(tagnames):
return [tagnames + ['value']] + self._to_table() # depends on [control=['if'], data=[]]
elif len(shape) == len(tagnames) + 1: # there is an extra field
tbl = [tagnames + [self.extra[0], 'value']]
return tbl + self._to_table(self.extra[1:]) # depends on [control=['if'], data=[]]
else:
raise TypeError('There are %d dimensions but only %d tagnames' % (len(shape), len(tagnames))) |
def _to_dict_of_blocks(self, copy=True):
"""
Return a dict of dtype -> Constructor Types that
each is a homogeneous dtype.
Internal ONLY
"""
return {k: self._constructor(v).__finalize__(self)
for k, v, in self._data.to_dict(copy=copy).items()} | def function[_to_dict_of_blocks, parameter[self, copy]]:
constant[
Return a dict of dtype -> Constructor Types that
each is a homogeneous dtype.
Internal ONLY
]
return[<ast.DictComp object at 0x7da18ede6fb0>] | keyword[def] identifier[_to_dict_of_blocks] ( identifier[self] , identifier[copy] = keyword[True] ):
literal[string]
keyword[return] { identifier[k] : identifier[self] . identifier[_constructor] ( identifier[v] ). identifier[__finalize__] ( identifier[self] )
keyword[for] identifier[k] , identifier[v] , keyword[in] identifier[self] . identifier[_data] . identifier[to_dict] ( identifier[copy] = identifier[copy] ). identifier[items] ()} | def _to_dict_of_blocks(self, copy=True):
"""
Return a dict of dtype -> Constructor Types that
each is a homogeneous dtype.
Internal ONLY
"""
return {k: self._constructor(v).__finalize__(self) for (k, v) in self._data.to_dict(copy=copy).items()} |
def shot_chart(x, y, kind="scatter", title="", color="b", cmap=None,
               xlim=(-250, 250), ylim=(422.5, -47.5),
               court_color="gray", court_lw=1, outer_lines=False,
               flip_court=False, kde_shade=True, gridsize=None, ax=None,
               despine=False, **kwargs):
    """
    Returns an Axes object with player shots plotted.
    Parameters
    ----------
    x, y : strings or vector
        The x and y coordinates of the shots taken. They can be passed in as
        vectors (such as a pandas Series).
    kind : { "scatter", "kde", "hex" }, optional
        The kind of shot chart to create.
    title : str, optional
        The title for the plot.
    color : matplotlib color, optional
        Color used to plot the shots
    cmap : matplotlib Colormap object or name, optional
        Colormap for the range of data values. If one isn't provided, the
        colormap is derived from the value passed to ``color``. Used for KDE
        and Hexbin plots.
    {x, y}lim : two-tuples, optional
        The axis limits of the plot.
    court_color : matplotlib color, optional
        The color of the court lines.
    court_lw : float, optional
        The linewidth of the court lines.
    outer_lines : boolean, optional
        If ``True`` the out of bound lines are drawn in as a matplotlib
        Rectangle.
    flip_court : boolean, optional
        If ``True`` orients the hoop towards the bottom of the plot. Default
        is ``False``, which orients the court where the hoop is towards the top
        of the plot.
    kde_shade : boolean, optional
        Default is ``True``, which shades in the KDE contours.
    gridsize : int, optional
        Number of hexagons in the x-direction. The default is calculated using
        the Freedman-Diaconis method.
    ax : Axes, optional
        The Axes object to plot the court onto.
    despine : boolean, optional
        If ``True``, removes the spines.
    kwargs : key, value pairs
        Keyword arguments for matplotlib Collection properties or seaborn plots.
    Returns
    -------
    ax : Axes
        The Axes object with the shot chart plotted on it.
    """
    if ax is None:
        ax = plt.gca()
    if cmap is None:
        # Derive a sequential colormap from the base shot color; only used
        # by the "kde" and "hex" kinds.
        cmap = sns.light_palette(color, as_cmap=True)
    if not flip_court:
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
    else:
        # Reverse both axis limits so the hoop ends up at the bottom.
        ax.set_xlim(xlim[::-1])
        ax.set_ylim(ylim[::-1])
    # NOTE(review): string values like "off" for tick-label visibility are
    # deprecated in modern matplotlib (booleans expected) -- confirm against
    # the matplotlib versions this project supports.
    ax.tick_params(labelbottom="off", labelleft="off")
    ax.set_title(title, fontsize=18)
    draw_court(ax, color=court_color, lw=court_lw, outer_lines=outer_lines)
    if kind == "scatter":
        ax.scatter(x, y, c=color, **kwargs)
    elif kind == "kde":
        sns.kdeplot(x, y, shade=kde_shade, cmap=cmap, ax=ax, **kwargs)
        # kdeplot labels the axes with the input names; clear them.
        ax.set_xlabel('')
        ax.set_ylabel('')
    elif kind == "hex":
        if gridsize is None:
            # Get the number of bins for hexbin using Freedman-Diaconis rule.
            # This idea was taken from seaborn, which got the calculation
            # from http://stats.stackexchange.com/questions/798/
            from seaborn.distributions import _freedman_diaconis_bins
            x_bin = _freedman_diaconis_bins(x)
            y_bin = _freedman_diaconis_bins(y)
            gridsize = int(np.mean([x_bin, y_bin]))
        ax.hexbin(x, y, gridsize=gridsize, cmap=cmap, **kwargs)
    else:
        raise ValueError("kind must be 'scatter', 'kde', or 'hex'.")
    # Set the spines to match the rest of court lines, makes outer_lines
    # somewhat unnecessary
    for spine in ax.spines:
        ax.spines[spine].set_lw(court_lw)
        ax.spines[spine].set_color(court_color)
    if despine:
        ax.spines["top"].set_visible(False)
        ax.spines["bottom"].set_visible(False)
        ax.spines["right"].set_visible(False)
        ax.spines["left"].set_visible(False)
    return ax
constant[
Returns an Axes object with player shots plotted.
Parameters
----------
x, y : strings or vector
The x and y coordinates of the shots taken. They can be passed in as
vectors (such as a pandas Series) or as columns from the pandas
DataFrame passed into ``data``.
data : DataFrame, optional
DataFrame containing shots where ``x`` and ``y`` represent the
shot location coordinates.
kind : { "scatter", "kde", "hex" }, optional
The kind of shot chart to create.
title : str, optional
The title for the plot.
color : matplotlib color, optional
Color used to plot the shots
cmap : matplotlib Colormap object or name, optional
Colormap for the range of data values. If one isn't provided, the
colormap is derived from the valuue passed to ``color``. Used for KDE
and Hexbin plots.
{x, y}lim : two-tuples, optional
The axis limits of the plot.
court_color : matplotlib color, optional
The color of the court lines.
court_lw : float, optional
The linewidth the of the court lines.
outer_lines : boolean, optional
If ``True`` the out of bound lines are drawn in as a matplotlib
Rectangle.
flip_court : boolean, optional
If ``True`` orients the hoop towards the bottom of the plot. Default
is ``False``, which orients the court where the hoop is towards the top
of the plot.
kde_shade : boolean, optional
Default is ``True``, which shades in the KDE contours.
gridsize : int, optional
Number of hexagons in the x-direction. The default is calculated using
the Freedman-Diaconis method.
ax : Axes, optional
The Axes object to plot the court onto.
despine : boolean, optional
If ``True``, removes the spines.
kwargs : key, value pairs
Keyword arguments for matplotlib Collection properties or seaborn plots.
Returns
-------
ax : Axes
The Axes object with the shot chart plotted on it.
]
if compare[name[ax] is constant[None]] begin[:]
variable[ax] assign[=] call[name[plt].gca, parameter[]]
if compare[name[cmap] is constant[None]] begin[:]
variable[cmap] assign[=] call[name[sns].light_palette, parameter[name[color]]]
if <ast.UnaryOp object at 0x7da20c6e4760> begin[:]
call[name[ax].set_xlim, parameter[name[xlim]]]
call[name[ax].set_ylim, parameter[name[ylim]]]
call[name[ax].tick_params, parameter[]]
call[name[ax].set_title, parameter[name[title]]]
call[name[draw_court], parameter[name[ax]]]
if compare[name[kind] equal[==] constant[scatter]] begin[:]
call[name[ax].scatter, parameter[name[x], name[y]]]
for taget[name[spine]] in starred[name[ax].spines] begin[:]
call[call[name[ax].spines][name[spine]].set_lw, parameter[name[court_lw]]]
call[call[name[ax].spines][name[spine]].set_color, parameter[name[court_color]]]
if name[despine] begin[:]
call[call[name[ax].spines][constant[top]].set_visible, parameter[constant[False]]]
call[call[name[ax].spines][constant[bottom]].set_visible, parameter[constant[False]]]
call[call[name[ax].spines][constant[right]].set_visible, parameter[constant[False]]]
call[call[name[ax].spines][constant[left]].set_visible, parameter[constant[False]]]
return[name[ax]] | keyword[def] identifier[shot_chart] ( identifier[x] , identifier[y] , identifier[kind] = literal[string] , identifier[title] = literal[string] , identifier[color] = literal[string] , identifier[cmap] = keyword[None] ,
identifier[xlim] =(- literal[int] , literal[int] ), identifier[ylim] =( literal[int] ,- literal[int] ),
identifier[court_color] = literal[string] , identifier[court_lw] = literal[int] , identifier[outer_lines] = keyword[False] ,
identifier[flip_court] = keyword[False] , identifier[kde_shade] = keyword[True] , identifier[gridsize] = keyword[None] , identifier[ax] = keyword[None] ,
identifier[despine] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[ax] = identifier[plt] . identifier[gca] ()
keyword[if] identifier[cmap] keyword[is] keyword[None] :
identifier[cmap] = identifier[sns] . identifier[light_palette] ( identifier[color] , identifier[as_cmap] = keyword[True] )
keyword[if] keyword[not] identifier[flip_court] :
identifier[ax] . identifier[set_xlim] ( identifier[xlim] )
identifier[ax] . identifier[set_ylim] ( identifier[ylim] )
keyword[else] :
identifier[ax] . identifier[set_xlim] ( identifier[xlim] [::- literal[int] ])
identifier[ax] . identifier[set_ylim] ( identifier[ylim] [::- literal[int] ])
identifier[ax] . identifier[tick_params] ( identifier[labelbottom] = literal[string] , identifier[labelleft] = literal[string] )
identifier[ax] . identifier[set_title] ( identifier[title] , identifier[fontsize] = literal[int] )
identifier[draw_court] ( identifier[ax] , identifier[color] = identifier[court_color] , identifier[lw] = identifier[court_lw] , identifier[outer_lines] = identifier[outer_lines] )
keyword[if] identifier[kind] == literal[string] :
identifier[ax] . identifier[scatter] ( identifier[x] , identifier[y] , identifier[c] = identifier[color] ,** identifier[kwargs] )
keyword[elif] identifier[kind] == literal[string] :
identifier[sns] . identifier[kdeplot] ( identifier[x] , identifier[y] , identifier[shade] = identifier[kde_shade] , identifier[cmap] = identifier[cmap] , identifier[ax] = identifier[ax] ,** identifier[kwargs] )
identifier[ax] . identifier[set_xlabel] ( literal[string] )
identifier[ax] . identifier[set_ylabel] ( literal[string] )
keyword[elif] identifier[kind] == literal[string] :
keyword[if] identifier[gridsize] keyword[is] keyword[None] :
keyword[from] identifier[seaborn] . identifier[distributions] keyword[import] identifier[_freedman_diaconis_bins]
identifier[x_bin] = identifier[_freedman_diaconis_bins] ( identifier[x] )
identifier[y_bin] = identifier[_freedman_diaconis_bins] ( identifier[y] )
identifier[gridsize] = identifier[int] ( identifier[np] . identifier[mean] ([ identifier[x_bin] , identifier[y_bin] ]))
identifier[ax] . identifier[hexbin] ( identifier[x] , identifier[y] , identifier[gridsize] = identifier[gridsize] , identifier[cmap] = identifier[cmap] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[spine] keyword[in] identifier[ax] . identifier[spines] :
identifier[ax] . identifier[spines] [ identifier[spine] ]. identifier[set_lw] ( identifier[court_lw] )
identifier[ax] . identifier[spines] [ identifier[spine] ]. identifier[set_color] ( identifier[court_color] )
keyword[if] identifier[despine] :
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
keyword[return] identifier[ax] | def shot_chart(x, y, kind='scatter', title='', color='b', cmap=None, xlim=(-250, 250), ylim=(422.5, -47.5), court_color='gray', court_lw=1, outer_lines=False, flip_court=False, kde_shade=True, gridsize=None, ax=None, despine=False, **kwargs):
"""
Returns an Axes object with player shots plotted.
Parameters
----------
x, y : strings or vector
The x and y coordinates of the shots taken. They can be passed in as
vectors (such as a pandas Series) or as columns from the pandas
DataFrame passed into ``data``.
data : DataFrame, optional
DataFrame containing shots where ``x`` and ``y`` represent the
shot location coordinates.
kind : { "scatter", "kde", "hex" }, optional
The kind of shot chart to create.
title : str, optional
The title for the plot.
color : matplotlib color, optional
Color used to plot the shots
cmap : matplotlib Colormap object or name, optional
Colormap for the range of data values. If one isn't provided, the
colormap is derived from the valuue passed to ``color``. Used for KDE
and Hexbin plots.
{x, y}lim : two-tuples, optional
The axis limits of the plot.
court_color : matplotlib color, optional
The color of the court lines.
court_lw : float, optional
The linewidth the of the court lines.
outer_lines : boolean, optional
If ``True`` the out of bound lines are drawn in as a matplotlib
Rectangle.
flip_court : boolean, optional
If ``True`` orients the hoop towards the bottom of the plot. Default
is ``False``, which orients the court where the hoop is towards the top
of the plot.
kde_shade : boolean, optional
Default is ``True``, which shades in the KDE contours.
gridsize : int, optional
Number of hexagons in the x-direction. The default is calculated using
the Freedman-Diaconis method.
ax : Axes, optional
The Axes object to plot the court onto.
despine : boolean, optional
If ``True``, removes the spines.
kwargs : key, value pairs
Keyword arguments for matplotlib Collection properties or seaborn plots.
Returns
-------
ax : Axes
The Axes object with the shot chart plotted on it.
"""
if ax is None:
ax = plt.gca() # depends on [control=['if'], data=['ax']]
if cmap is None:
cmap = sns.light_palette(color, as_cmap=True) # depends on [control=['if'], data=['cmap']]
if not flip_court:
ax.set_xlim(xlim)
ax.set_ylim(ylim) # depends on [control=['if'], data=[]]
else:
ax.set_xlim(xlim[::-1])
ax.set_ylim(ylim[::-1])
ax.tick_params(labelbottom='off', labelleft='off')
ax.set_title(title, fontsize=18)
draw_court(ax, color=court_color, lw=court_lw, outer_lines=outer_lines)
if kind == 'scatter':
ax.scatter(x, y, c=color, **kwargs) # depends on [control=['if'], data=[]]
elif kind == 'kde':
sns.kdeplot(x, y, shade=kde_shade, cmap=cmap, ax=ax, **kwargs)
ax.set_xlabel('')
ax.set_ylabel('') # depends on [control=['if'], data=[]]
elif kind == 'hex':
if gridsize is None:
# Get the number of bins for hexbin using Freedman-Diaconis rule
# This is idea was taken from seaborn, which got the calculation
# from http://stats.stackexchange.com/questions/798/
from seaborn.distributions import _freedman_diaconis_bins
x_bin = _freedman_diaconis_bins(x)
y_bin = _freedman_diaconis_bins(y)
gridsize = int(np.mean([x_bin, y_bin])) # depends on [control=['if'], data=['gridsize']]
ax.hexbin(x, y, gridsize=gridsize, cmap=cmap, **kwargs) # depends on [control=['if'], data=[]]
else:
raise ValueError("kind must be 'scatter', 'kde', or 'hex'.")
# Set the spines to match the rest of court lines, makes outer_lines
# somewhate unnecessary
for spine in ax.spines:
ax.spines[spine].set_lw(court_lw)
ax.spines[spine].set_color(court_color) # depends on [control=['for'], data=['spine']]
if despine:
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False) # depends on [control=['if'], data=[]]
return ax |
def process_module_output(self, module):
    """
    Process the output for a module and return a json string representing it.
    Color processing occurs here.
    """
    latest = module["module"].get_latest()
    default_color = module["color"]
    if default_color:
        # Apply the config-defined color to any output lacking its own.
        for item in latest:
            item.setdefault("color", default_color)
    # Serialize each output dict and join them into one json fragment.
    return ",".join(dumps(item) for item in latest)
constant[
Process the output for a module and return a json string representing it.
Color processing occurs here.
]
variable[outputs] assign[=] call[call[name[module]][constant[module]].get_latest, parameter[]]
variable[color] assign[=] call[name[module]][constant[color]]
if name[color] begin[:]
for taget[name[output]] in starred[name[outputs]] begin[:]
if compare[constant[color] <ast.NotIn object at 0x7da2590d7190> name[output]] begin[:]
call[name[output]][constant[color]] assign[=] name[color]
return[call[constant[,].join, parameter[<ast.ListComp object at 0x7da20c7c8c40>]]] | keyword[def] identifier[process_module_output] ( identifier[self] , identifier[module] ):
literal[string]
identifier[outputs] = identifier[module] [ literal[string] ]. identifier[get_latest] ()
identifier[color] = identifier[module] [ literal[string] ]
keyword[if] identifier[color] :
keyword[for] identifier[output] keyword[in] identifier[outputs] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[output] :
identifier[output] [ literal[string] ]= identifier[color]
keyword[return] literal[string] . identifier[join] ([ identifier[dumps] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[outputs] ]) | def process_module_output(self, module):
"""
Process the output for a module and return a json string representing it.
Color processing occurs here.
"""
outputs = module['module'].get_latest()
color = module['color']
if color:
for output in outputs:
# Color: substitute the config defined color
if 'color' not in output:
output['color'] = color # depends on [control=['if'], data=['output']] # depends on [control=['for'], data=['output']] # depends on [control=['if'], data=[]]
# Create the json string output.
return ','.join([dumps(x) for x in outputs]) |
def setLineEdit(self, lineEdit):
    """
    Sets the line edit instance for this label.
    :param lineEdit | <XLineEdit>
    """
    self._lineEdit = lineEdit
    if not lineEdit:
        # Nothing to configure when the editor is cleared.
        return
    # Mirror this label's look and geometry, watch its events, and keep
    # the editor hidden until it is explicitly shown.
    lineEdit.setFont(self.font())
    lineEdit.installEventFilter(self)
    lineEdit.resize(self.size())
    lineEdit.hide()
constant[
Sets the line edit instance for this label.
:param lineEdit | <XLineEdit>
]
name[self]._lineEdit assign[=] name[lineEdit]
if name[lineEdit] begin[:]
call[name[lineEdit].setFont, parameter[call[name[self].font, parameter[]]]]
call[name[lineEdit].installEventFilter, parameter[name[self]]]
call[name[lineEdit].resize, parameter[call[name[self].size, parameter[]]]]
call[name[lineEdit].hide, parameter[]] | keyword[def] identifier[setLineEdit] ( identifier[self] , identifier[lineEdit] ):
literal[string]
identifier[self] . identifier[_lineEdit] = identifier[lineEdit]
keyword[if] identifier[lineEdit] :
identifier[lineEdit] . identifier[setFont] ( identifier[self] . identifier[font] ())
identifier[lineEdit] . identifier[installEventFilter] ( identifier[self] )
identifier[lineEdit] . identifier[resize] ( identifier[self] . identifier[size] ())
identifier[lineEdit] . identifier[hide] () | def setLineEdit(self, lineEdit):
"""
Sets the line edit instance for this label.
:param lineEdit | <XLineEdit>
"""
self._lineEdit = lineEdit
if lineEdit:
lineEdit.setFont(self.font())
lineEdit.installEventFilter(self)
lineEdit.resize(self.size())
lineEdit.hide() # depends on [control=['if'], data=[]] |
def _build_message_headers(self):
    """Fetch Message Headers (Deliver & Header Frames).
    :rtype: tuple|None
    """
    # Frames must arrive as Basic.Deliver followed by ContentHeader;
    # anything else is out of order and the pair is discarded.
    deliver_frame = self._inbound.pop(0)
    if not isinstance(deliver_frame, specification.Basic.Deliver):
        LOGGER.warning(
            'Received an out-of-order frame: %s was '
            'expecting a Basic.Deliver frame',
            type(deliver_frame)
        )
        return None
    header_frame = self._inbound.pop(0)
    if not isinstance(header_frame, ContentHeader):
        LOGGER.warning(
            'Received an out-of-order frame: %s was '
            'expecting a ContentHeader frame',
            type(header_frame)
        )
        return None
    return deliver_frame, header_frame
constant[Fetch Message Headers (Deliver & Header Frames).
:rtype: tuple|None
]
variable[basic_deliver] assign[=] call[name[self]._inbound.pop, parameter[constant[0]]]
if <ast.UnaryOp object at 0x7da20c6e7ac0> begin[:]
call[name[LOGGER].warning, parameter[constant[Received an out-of-order frame: %s was expecting a Basic.Deliver frame], call[name[type], parameter[name[basic_deliver]]]]]
return[constant[None]]
variable[content_header] assign[=] call[name[self]._inbound.pop, parameter[constant[0]]]
if <ast.UnaryOp object at 0x7da20c6e7e50> begin[:]
call[name[LOGGER].warning, parameter[constant[Received an out-of-order frame: %s was expecting a ContentHeader frame], call[name[type], parameter[name[content_header]]]]]
return[constant[None]]
return[tuple[[<ast.Name object at 0x7da2054a59f0>, <ast.Name object at 0x7da2054a40a0>]]] | keyword[def] identifier[_build_message_headers] ( identifier[self] ):
literal[string]
identifier[basic_deliver] = identifier[self] . identifier[_inbound] . identifier[pop] ( literal[int] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[basic_deliver] , identifier[specification] . identifier[Basic] . identifier[Deliver] ):
identifier[LOGGER] . identifier[warning] (
literal[string]
literal[string] ,
identifier[type] ( identifier[basic_deliver] )
)
keyword[return] keyword[None]
identifier[content_header] = identifier[self] . identifier[_inbound] . identifier[pop] ( literal[int] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[content_header] , identifier[ContentHeader] ):
identifier[LOGGER] . identifier[warning] (
literal[string]
literal[string] ,
identifier[type] ( identifier[content_header] )
)
keyword[return] keyword[None]
keyword[return] identifier[basic_deliver] , identifier[content_header] | def _build_message_headers(self):
"""Fetch Message Headers (Deliver & Header Frames).
:rtype: tuple|None
"""
basic_deliver = self._inbound.pop(0)
if not isinstance(basic_deliver, specification.Basic.Deliver):
LOGGER.warning('Received an out-of-order frame: %s was expecting a Basic.Deliver frame', type(basic_deliver))
return None # depends on [control=['if'], data=[]]
content_header = self._inbound.pop(0)
if not isinstance(content_header, ContentHeader):
LOGGER.warning('Received an out-of-order frame: %s was expecting a ContentHeader frame', type(content_header))
return None # depends on [control=['if'], data=[]]
return (basic_deliver, content_header) |
def encode_uvarint(n, data):
    '''Append ``n`` to ``data`` in variable-length base-128 encoding.

    Each byte carries 7 payload bits; the high bit marks a continuation.
    '''
    if n < 0:
        raise ValueError('only support positive integer')
    # Emit continuation bytes while more than 7 bits remain.
    while n > 0x7F:
        data.append((n & 0x7F) | 0x80)
        n >>= 7
    # Final byte has the continuation bit clear.
    data.append(n)
constant[encodes integer into variable-length format into data.]
if compare[name[n] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b2458070>
while constant[True] begin[:]
variable[this_byte] assign[=] binary_operation[name[n] <ast.BitAnd object at 0x7da2590d6b60> constant[127]]
<ast.AugAssign object at 0x7da1b245af50>
if compare[name[n] equal[==] constant[0]] begin[:]
call[name[data].append, parameter[name[this_byte]]]
break
call[name[data].append, parameter[binary_operation[name[this_byte] <ast.BitOr object at 0x7da2590d6aa0> constant[128]]]] | keyword[def] identifier[encode_uvarint] ( identifier[n] , identifier[data] ):
literal[string]
keyword[if] identifier[n] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[while] keyword[True] :
identifier[this_byte] = identifier[n] & literal[int]
identifier[n] >>= literal[int]
keyword[if] identifier[n] == literal[int] :
identifier[data] . identifier[append] ( identifier[this_byte] )
keyword[break]
identifier[data] . identifier[append] ( identifier[this_byte] | literal[int] ) | def encode_uvarint(n, data):
"""encodes integer into variable-length format into data."""
if n < 0:
raise ValueError('only support positive integer') # depends on [control=['if'], data=[]]
while True:
this_byte = n & 127
n >>= 7
if n == 0:
data.append(this_byte)
break # depends on [control=['if'], data=[]]
data.append(this_byte | 128) # depends on [control=['while'], data=[]] |
def _indent(text, amount):
"""Indent a multiline string by some number of spaces.
Parameters
----------
text: str
The text to be indented
amount: int
The number of spaces to indent the text
Returns
-------
indented_text
"""
indentation = amount * ' '
return indentation + ('\n' + indentation).join(text.split('\n')) | def function[_indent, parameter[text, amount]]:
constant[Indent a multiline string by some number of spaces.
Parameters
----------
text: str
The text to be indented
amount: int
The number of spaces to indent the text
Returns
-------
indented_text
]
variable[indentation] assign[=] binary_operation[name[amount] * constant[ ]]
return[binary_operation[name[indentation] + call[binary_operation[constant[
] + name[indentation]].join, parameter[call[name[text].split, parameter[constant[
]]]]]]] | keyword[def] identifier[_indent] ( identifier[text] , identifier[amount] ):
literal[string]
identifier[indentation] = identifier[amount] * literal[string]
keyword[return] identifier[indentation] +( literal[string] + identifier[indentation] ). identifier[join] ( identifier[text] . identifier[split] ( literal[string] )) | def _indent(text, amount):
"""Indent a multiline string by some number of spaces.
Parameters
----------
text: str
The text to be indented
amount: int
The number of spaces to indent the text
Returns
-------
indented_text
"""
indentation = amount * ' '
return indentation + ('\n' + indentation).join(text.split('\n')) |
def imag(self, newimag):
    """Setter for the imaginary part.
    This method is invoked by ``x.imag = other``.
    Parameters
    ----------
    newimag : array-like or scalar
        Values to be assigned to the imaginary part of this element.
    Raises
    ------
    ValueError
        If ``newimag`` is a sequence whose length matches neither the
        parts nor the space structure.
    """
    try:
        iter(newimag)
    except TypeError:
        # `newimag` is not iterable, assume it can be assigned to
        # all indexed parts
        for part in self.parts:
            part.imag = newimag
        return

    if self.space.is_power_space:
        try:
            # Set same value in all parts
            for part in self.parts:
                part.imag = newimag
        except (ValueError, TypeError):
            # Broadcast failed; iterate over all parts and set them
            # separately instead.
            for part, new_im in zip(self.parts, newimag):
                part.imag = new_im
    elif len(newimag) == len(self):
        for part, new_im in zip(self.parts, newimag):
            part.imag = new_im
    else:
        # BUG FIX: the original format string ended in '{}}', whose stray
        # '}' made .format() itself raise ValueError and mask this message;
        # it also said "real part" inside the imag setter.
        raise ValueError(
            'dimensions of the new imaginary part do not match the '
            'space, got element {} to set imaginary part of {}'
            ''.format(newimag, self))
constant[Setter for the imaginary part.
This method is invoked by ``x.imag = other``.
Parameters
----------
newimag : array-like or scalar
Values to be assigned to the imaginary part of this element.
]
<ast.Try object at 0x7da1b1e5c6a0>
if name[self].space.is_power_space begin[:]
<ast.Try object at 0x7da1b1e5d3c0> | keyword[def] identifier[imag] ( identifier[self] , identifier[newimag] ):
literal[string]
keyword[try] :
identifier[iter] ( identifier[newimag] )
keyword[except] identifier[TypeError] :
keyword[for] identifier[part] keyword[in] identifier[self] . identifier[parts] :
identifier[part] . identifier[imag] = identifier[newimag]
keyword[return]
keyword[if] identifier[self] . identifier[space] . identifier[is_power_space] :
keyword[try] :
keyword[for] identifier[part] keyword[in] identifier[self] . identifier[parts] :
identifier[part] . identifier[imag] = identifier[newimag]
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
keyword[for] identifier[part] , identifier[new_im] keyword[in] identifier[zip] ( identifier[self] . identifier[parts] , identifier[newimag] ):
identifier[part] . identifier[imag] = identifier[new_im]
keyword[pass]
keyword[elif] identifier[len] ( identifier[newimag] )== identifier[len] ( identifier[self] ):
keyword[for] identifier[part] , identifier[new_im] keyword[in] identifier[zip] ( identifier[self] . identifier[parts] , identifier[newimag] ):
identifier[part] . identifier[imag] = identifier[new_im]
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[newimag] , identifier[self] )) | def imag(self, newimag):
"""Setter for the imaginary part.
This method is invoked by ``x.imag = other``.
Parameters
----------
newimag : array-like or scalar
Values to be assigned to the imaginary part of this element.
"""
try:
iter(newimag) # depends on [control=['try'], data=[]]
except TypeError:
# `newimag` is not iterable, assume it can be assigned to
# all indexed parts
for part in self.parts:
part.imag = newimag # depends on [control=['for'], data=['part']]
return # depends on [control=['except'], data=[]]
if self.space.is_power_space:
try:
# Set same value in all parts
for part in self.parts:
part.imag = newimag # depends on [control=['for'], data=['part']] # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
# Iterate over all parts and set them separately
for (part, new_im) in zip(self.parts, newimag):
part.imag = new_im # depends on [control=['for'], data=[]]
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif len(newimag) == len(self):
for (part, new_im) in zip(self.parts, newimag):
part.imag = new_im # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
raise ValueError('dimensions of the new imaginary part does not match the space, got element {} to set real part of {}}'.format(newimag, self)) |
def get_new_tag(self, api_tag):
    """Build a new Tag object from raw API data.

    :param api_tag: the api data for the Tag
    :return: the new Tag
    """
    # Pull the generic tag fields out of the payload first, then combine
    # them with the site id and the WordPress id of this tag.
    extra_fields = self.api_object_data("tag", api_tag)
    return Tag(site_id=self.site_id, wp_id=api_tag["ID"], **extra_fields)
constant[
Instantiate a new Tag from api data.
:param api_tag: the api data for the Tag
:return: the new Tag
]
return[call[name[Tag], parameter[]]] | keyword[def] identifier[get_new_tag] ( identifier[self] , identifier[api_tag] ):
literal[string]
keyword[return] identifier[Tag] ( identifier[site_id] = identifier[self] . identifier[site_id] ,
identifier[wp_id] = identifier[api_tag] [ literal[string] ],
** identifier[self] . identifier[api_object_data] ( literal[string] , identifier[api_tag] )) | def get_new_tag(self, api_tag):
"""
Instantiate a new Tag from api data.
:param api_tag: the api data for the Tag
:return: the new Tag
"""
return Tag(site_id=self.site_id, wp_id=api_tag['ID'], **self.api_object_data('tag', api_tag)) |
def generate_name_variations(name):
    """Generate name variations for a given name.

    Args:
        name (six.text_type): The name whose variations are to be generated.

    Returns:
        list: All the name variations for the given name.

    Notes:
        Uses `unidecode` for doing unicode characters transliteration to ASCII ones. This was chosen so that we can map
        both full names of authors in HEP records and user's input to the same space and thus make exact queries work.
    """
    variations = set()
    strip_chars = ''.join(_LASTNAME_NON_LASTNAME_SEPARATORS)

    def _add_combinations(left_parts, right_parts):
        # Join every (left, right) pair with every allowed separator, trim
        # stray separator characters, transliterate to ASCII and lowercase.
        for left, right in product(left_parts, right_parts):
            for separator in _LASTNAME_NON_LASTNAME_SEPARATORS:
                joined = (left + separator + right).strip(strip_chars)
                variations.add(unidecode(joined).lower())

    parsed_name = ParsedName.loads(name)

    # A single-token name has exactly one variation: itself, lowercased.
    if len(parsed_name) == 1:
        return [parsed_name.dumps().lower()]

    # Filter out empty entries: HumanName yields an empty first-name element
    # for malformed input such as "Perelstein,, Maxim".
    non_lastnames = [
        part
        for part in parsed_name.first_list + parsed_name.suffix_list
        if part
    ]

    # Guard against combinatorial blow-up from erroneous data (e.g. many
    # authors crammed into a single authors field). Such records will have
    # to be curated, so fall back to the raw input as the only variation.
    if (len(non_lastnames) > _NAMES_MAX_NUMBER_THRESHOLD
            or len(parsed_name.last_list) > _NAMES_MAX_NUMBER_THRESHOLD):
        LOGGER.error('Skipping name variations generation - too many names in: "%s"', name)
        return [name]

    firsts = _generate_non_lastnames_variations(non_lastnames)
    lasts = _generate_lastnames_variations(parsed_name.last_list)

    # Variations with the lastnames first (comma/space separated) ...
    _add_combinations(lasts, firsts)
    # ... and the mirror image, with the lastnames at the end.
    _add_combinations(firsts, lasts)
    return list(variations)
constant[Generate name variations for a given name.
Args:
name (six.text_type): The name whose variations are to be generated.
Returns:
list: All the name variations for the given name.
Notes:
Uses `unidecode` for doing unicode characters transliteration to ASCII ones. This was chosen so that we can map
both full names of authors in HEP records and user's input to the same space and thus make exact queries work.
]
def function[_update_name_variations_with_product, parameter[set_a, set_b]]:
call[name[name_variations].update, parameter[<ast.ListComp object at 0x7da18dc9ae00>]]
variable[parsed_name] assign[=] call[name[ParsedName].loads, parameter[name[name]]]
if compare[call[name[len], parameter[name[parsed_name]]] equal[==] constant[1]] begin[:]
return[list[[<ast.Call object at 0x7da207f9a0b0>]]]
variable[name_variations] assign[=] call[name[set], parameter[]]
variable[non_lastnames] assign[=] <ast.ListComp object at 0x7da207f99540>
if <ast.BoolOp object at 0x7da207f994b0> begin[:]
call[name[LOGGER].error, parameter[constant[Skipping name variations generation - too many names in: "%s"], name[name]]]
return[list[[<ast.Name object at 0x7da18f00f640>]]]
variable[non_lastnames_variations] assign[=] call[name[_generate_non_lastnames_variations], parameter[name[non_lastnames]]]
variable[lastnames_variations] assign[=] call[name[_generate_lastnames_variations], parameter[name[parsed_name].last_list]]
call[name[_update_name_variations_with_product], parameter[name[lastnames_variations], name[non_lastnames_variations]]]
call[name[_update_name_variations_with_product], parameter[name[non_lastnames_variations], name[lastnames_variations]]]
return[call[name[list], parameter[name[name_variations]]]] | keyword[def] identifier[generate_name_variations] ( identifier[name] ):
literal[string]
keyword[def] identifier[_update_name_variations_with_product] ( identifier[set_a] , identifier[set_b] ):
identifier[name_variations] . identifier[update] ([
identifier[unidecode] (( identifier[names_variation] [ literal[int] ]+
identifier[separator] +
identifier[names_variation] [ literal[int] ]). identifier[strip] ( literal[string] . identifier[join] ( identifier[_LASTNAME_NON_LASTNAME_SEPARATORS] ))). identifier[lower] ()
keyword[for] identifier[names_variation]
keyword[in] identifier[product] ( identifier[set_a] , identifier[set_b] )
keyword[for] identifier[separator]
keyword[in] identifier[_LASTNAME_NON_LASTNAME_SEPARATORS]
])
identifier[parsed_name] = identifier[ParsedName] . identifier[loads] ( identifier[name] )
keyword[if] identifier[len] ( identifier[parsed_name] )== literal[int] :
keyword[return] [ identifier[parsed_name] . identifier[dumps] (). identifier[lower] ()]
identifier[name_variations] = identifier[set] ()
identifier[non_lastnames] =[
identifier[non_lastname]
keyword[for] identifier[non_lastname]
keyword[in] identifier[parsed_name] . identifier[first_list] + identifier[parsed_name] . identifier[suffix_list]
keyword[if] identifier[non_lastname]
]
keyword[if] identifier[len] ( identifier[non_lastnames] )> identifier[_NAMES_MAX_NUMBER_THRESHOLD] keyword[or] identifier[len] ( identifier[parsed_name] . identifier[last_list] )> identifier[_NAMES_MAX_NUMBER_THRESHOLD] :
identifier[LOGGER] . identifier[error] ( literal[string] , identifier[name] )
keyword[return] [ identifier[name] ]
identifier[non_lastnames_variations] = identifier[_generate_non_lastnames_variations] ( identifier[non_lastnames] )
identifier[lastnames_variations] = identifier[_generate_lastnames_variations] ( identifier[parsed_name] . identifier[last_list] )
identifier[_update_name_variations_with_product] ( identifier[lastnames_variations] , identifier[non_lastnames_variations] )
identifier[_update_name_variations_with_product] ( identifier[non_lastnames_variations] , identifier[lastnames_variations] )
keyword[return] identifier[list] ( identifier[name_variations] ) | def generate_name_variations(name):
"""Generate name variations for a given name.
Args:
name (six.text_type): The name whose variations are to be generated.
Returns:
list: All the name variations for the given name.
Notes:
Uses `unidecode` for doing unicode characters transliteration to ASCII ones. This was chosen so that we can map
both full names of authors in HEP records and user's input to the same space and thus make exact queries work.
"""
def _update_name_variations_with_product(set_a, set_b):
name_variations.update([unidecode((names_variation[0] + separator + names_variation[1]).strip(''.join(_LASTNAME_NON_LASTNAME_SEPARATORS))).lower() for names_variation in product(set_a, set_b) for separator in _LASTNAME_NON_LASTNAME_SEPARATORS])
parsed_name = ParsedName.loads(name)
# Handle rare-case of single-name
if len(parsed_name) == 1:
return [parsed_name.dumps().lower()] # depends on [control=['if'], data=[]]
name_variations = set()
# We need to filter out empty entries, since HumanName for this name `Perelstein,, Maxim` returns a first_list with
# an empty string element.
non_lastnames = [non_lastname for non_lastname in parsed_name.first_list + parsed_name.suffix_list if non_lastname]
# This is needed because due to erroneous data (e.g. having many authors in a single authors field) ends up
# requiring a lot of memory (due to combinatorial expansion of all non lastnames).
# The policy is to use the input as a name variation, since this data will have to be curated.
if len(non_lastnames) > _NAMES_MAX_NUMBER_THRESHOLD or len(parsed_name.last_list) > _NAMES_MAX_NUMBER_THRESHOLD:
LOGGER.error('Skipping name variations generation - too many names in: "%s"', name)
return [name] # depends on [control=['if'], data=[]]
non_lastnames_variations = _generate_non_lastnames_variations(non_lastnames)
lastnames_variations = _generate_lastnames_variations(parsed_name.last_list)
# Create variations where lastnames comes first and is separated from non lastnames either by space or comma.
_update_name_variations_with_product(lastnames_variations, non_lastnames_variations)
# Second part of transformations - having the lastnames in the end.
_update_name_variations_with_product(non_lastnames_variations, lastnames_variations)
return list(name_variations) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.