code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_style_dict(tree):
"""
Some things that are considered lists are actually supposed to be H tags
(h1, h2, etc.) These can be denoted by their styleId
"""
# This is a partial document and actual h1 is the document title, which
# will be displayed elsewhere.
headers = {
'heading 1': 'h2',
'heading 2': 'h3',
'heading 3': 'h4',
'heading 4': 'h5',
'heading 5': 'h6',
'heading 6': 'h6',
'heading 7': 'h6',
'heading 8': 'h6',
'heading 9': 'h6',
'heading 10': 'h6',
}
if tree is None:
return {}
w_namespace = get_namespace(tree, 'w')
result = {}
for el in tree:
style_id = el.get('%sstyleId' % w_namespace)
el_result = {
'header': False,
'font_size': None,
'based_on': None,
}
# Get the header info
name = el.find('%sname' % w_namespace)
if name is None:
continue
value = name.get('%sval' % w_namespace).lower()
if value in headers:
el_result['header'] = headers[value]
# Get the size info.
rpr = el.find('%srPr' % w_namespace)
if rpr is None:
continue
size = rpr.find('%ssz' % w_namespace)
if size is None:
el_result['font_size'] = None
else:
el_result['font_size'] = size.get('%sval' % w_namespace)
# Get based on info.
based_on = el.find('%sbasedOn' % w_namespace)
if based_on is None:
el_result['based_on'] = None
else:
el_result['based_on'] = based_on.get('%sval' % w_namespace)
result[style_id] = el_result
return result | def function[get_style_dict, parameter[tree]]:
constant[
Some things that are considered lists are actually supposed to be H tags
(h1, h2, etc.) These can be denoted by their styleId
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b023e380>, <ast.Constant object at 0x7da1b023f610>, <ast.Constant object at 0x7da1b023ea10>, <ast.Constant object at 0x7da1b023f010>, <ast.Constant object at 0x7da1b023df00>, <ast.Constant object at 0x7da1b023dc30>, <ast.Constant object at 0x7da1b023d2a0>, <ast.Constant object at 0x7da1b023f040>, <ast.Constant object at 0x7da1b023fdf0>, <ast.Constant object at 0x7da1b023dfc0>], [<ast.Constant object at 0x7da1b023ca60>, <ast.Constant object at 0x7da1b023e830>, <ast.Constant object at 0x7da1b023dcf0>, <ast.Constant object at 0x7da1b023f400>, <ast.Constant object at 0x7da1b023e080>, <ast.Constant object at 0x7da1b023d660>, <ast.Constant object at 0x7da1b023ef80>, <ast.Constant object at 0x7da1b023c190>, <ast.Constant object at 0x7da1b023dc60>, <ast.Constant object at 0x7da1b023d420>]]
if compare[name[tree] is constant[None]] begin[:]
return[dictionary[[], []]]
variable[w_namespace] assign[=] call[name[get_namespace], parameter[name[tree], constant[w]]]
variable[result] assign[=] dictionary[[], []]
for taget[name[el]] in starred[name[tree]] begin[:]
variable[style_id] assign[=] call[name[el].get, parameter[binary_operation[constant[%sstyleId] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]]
variable[el_result] assign[=] dictionary[[<ast.Constant object at 0x7da1b023f100>, <ast.Constant object at 0x7da1b023d7e0>, <ast.Constant object at 0x7da1b023e6b0>], [<ast.Constant object at 0x7da1b023d780>, <ast.Constant object at 0x7da1b023d480>, <ast.Constant object at 0x7da1b023d030>]]
variable[name] assign[=] call[name[el].find, parameter[binary_operation[constant[%sname] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]]
if compare[name[name] is constant[None]] begin[:]
continue
variable[value] assign[=] call[call[name[name].get, parameter[binary_operation[constant[%sval] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]].lower, parameter[]]
if compare[name[value] in name[headers]] begin[:]
call[name[el_result]][constant[header]] assign[=] call[name[headers]][name[value]]
variable[rpr] assign[=] call[name[el].find, parameter[binary_operation[constant[%srPr] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]]
if compare[name[rpr] is constant[None]] begin[:]
continue
variable[size] assign[=] call[name[rpr].find, parameter[binary_operation[constant[%ssz] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]]
if compare[name[size] is constant[None]] begin[:]
call[name[el_result]][constant[font_size]] assign[=] constant[None]
variable[based_on] assign[=] call[name[el].find, parameter[binary_operation[constant[%sbasedOn] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]]
if compare[name[based_on] is constant[None]] begin[:]
call[name[el_result]][constant[based_on]] assign[=] constant[None]
call[name[result]][name[style_id]] assign[=] name[el_result]
return[name[result]] | keyword[def] identifier[get_style_dict] ( identifier[tree] ):
literal[string]
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[if] identifier[tree] keyword[is] keyword[None] :
keyword[return] {}
identifier[w_namespace] = identifier[get_namespace] ( identifier[tree] , literal[string] )
identifier[result] ={}
keyword[for] identifier[el] keyword[in] identifier[tree] :
identifier[style_id] = identifier[el] . identifier[get] ( literal[string] % identifier[w_namespace] )
identifier[el_result] ={
literal[string] : keyword[False] ,
literal[string] : keyword[None] ,
literal[string] : keyword[None] ,
}
identifier[name] = identifier[el] . identifier[find] ( literal[string] % identifier[w_namespace] )
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[continue]
identifier[value] = identifier[name] . identifier[get] ( literal[string] % identifier[w_namespace] ). identifier[lower] ()
keyword[if] identifier[value] keyword[in] identifier[headers] :
identifier[el_result] [ literal[string] ]= identifier[headers] [ identifier[value] ]
identifier[rpr] = identifier[el] . identifier[find] ( literal[string] % identifier[w_namespace] )
keyword[if] identifier[rpr] keyword[is] keyword[None] :
keyword[continue]
identifier[size] = identifier[rpr] . identifier[find] ( literal[string] % identifier[w_namespace] )
keyword[if] identifier[size] keyword[is] keyword[None] :
identifier[el_result] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[el_result] [ literal[string] ]= identifier[size] . identifier[get] ( literal[string] % identifier[w_namespace] )
identifier[based_on] = identifier[el] . identifier[find] ( literal[string] % identifier[w_namespace] )
keyword[if] identifier[based_on] keyword[is] keyword[None] :
identifier[el_result] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[el_result] [ literal[string] ]= identifier[based_on] . identifier[get] ( literal[string] % identifier[w_namespace] )
identifier[result] [ identifier[style_id] ]= identifier[el_result]
keyword[return] identifier[result] | def get_style_dict(tree):
"""
Some things that are considered lists are actually supposed to be H tags
(h1, h2, etc.) These can be denoted by their styleId
"""
# This is a partial document and actual h1 is the document title, which
# will be displayed elsewhere.
headers = {'heading 1': 'h2', 'heading 2': 'h3', 'heading 3': 'h4', 'heading 4': 'h5', 'heading 5': 'h6', 'heading 6': 'h6', 'heading 7': 'h6', 'heading 8': 'h6', 'heading 9': 'h6', 'heading 10': 'h6'}
if tree is None:
return {} # depends on [control=['if'], data=[]]
w_namespace = get_namespace(tree, 'w')
result = {}
for el in tree:
style_id = el.get('%sstyleId' % w_namespace)
el_result = {'header': False, 'font_size': None, 'based_on': None}
# Get the header info
name = el.find('%sname' % w_namespace)
if name is None:
continue # depends on [control=['if'], data=[]]
value = name.get('%sval' % w_namespace).lower()
if value in headers:
el_result['header'] = headers[value] # depends on [control=['if'], data=['value', 'headers']]
# Get the size info.
rpr = el.find('%srPr' % w_namespace)
if rpr is None:
continue # depends on [control=['if'], data=[]]
size = rpr.find('%ssz' % w_namespace)
if size is None:
el_result['font_size'] = None # depends on [control=['if'], data=[]]
else:
el_result['font_size'] = size.get('%sval' % w_namespace)
# Get based on info.
based_on = el.find('%sbasedOn' % w_namespace)
if based_on is None:
el_result['based_on'] = None # depends on [control=['if'], data=[]]
else:
el_result['based_on'] = based_on.get('%sval' % w_namespace)
result[style_id] = el_result # depends on [control=['for'], data=['el']]
return result |
def parse_torrent_properties(table_datas):
    """
    Static method that parses a given list of table data elements and using helper methods
    `Parser.is_subcategory`, `Parser.is_quality`, `Parser.is_language`, collects torrent properties.
    :param list lxml.HtmlElement table_datas: table_datas to parse
    :return: identified category, subcategory, quality and languages.
    :rtype: dict
    """
    # The first cell always carries the category text; the remaining cells
    # are anchors whose URL query params identify which property they hold.
    output = {'category': table_datas[0].text, 'subcategory': None, 'quality': None, 'language': None}
    for i in range(1, len(table_datas)):
        td = table_datas[i]
        url = td.get('href')
        params = Parser.get_params(url)
        # "and not output[...]" keeps only the first match of each kind.
        if Parser.is_subcategory(params) and not output['subcategory']:
            output['subcategory'] = td.text
        elif Parser.is_quality(params) and not output['quality']:
            output['quality'] = td.text
        elif Parser.is_language(params) and not output['language']:
            output['language'] = td.text
    return output | def function[parse_torrent_properties, parameter[table_datas]]:
constant[
Static method that parses a given list of table data elements and using helper methods
`Parser.is_subcategory`, `Parser.is_quality`, `Parser.is_language`, collects torrent properties.
:param list lxml.HtmlElement table_datas: table_datas to parse
:return: identified category, subcategory, quality and languages.
:rtype: dict
]
variable[output] assign[=] dictionary[[<ast.Constant object at 0x7da1b170e7d0>, <ast.Constant object at 0x7da1b170d930>, <ast.Constant object at 0x7da1b170dae0>, <ast.Constant object at 0x7da1b170f190>], [<ast.Attribute object at 0x7da1b170c6a0>, <ast.Constant object at 0x7da1b170d0f0>, <ast.Constant object at 0x7da1b170f970>, <ast.Constant object at 0x7da1b170c640>]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[table_datas]]]]]] begin[:]
variable[td] assign[=] call[name[table_datas]][name[i]]
variable[url] assign[=] call[name[td].get, parameter[constant[href]]]
variable[params] assign[=] call[name[Parser].get_params, parameter[name[url]]]
if <ast.BoolOp object at 0x7da1b179c250> begin[:]
call[name[output]][constant[subcategory]] assign[=] name[td].text
return[name[output]] | keyword[def] identifier[parse_torrent_properties] ( identifier[table_datas] ):
literal[string]
identifier[output] ={ literal[string] : identifier[table_datas] [ literal[int] ]. identifier[text] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] }
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[table_datas] )):
identifier[td] = identifier[table_datas] [ identifier[i] ]
identifier[url] = identifier[td] . identifier[get] ( literal[string] )
identifier[params] = identifier[Parser] . identifier[get_params] ( identifier[url] )
keyword[if] identifier[Parser] . identifier[is_subcategory] ( identifier[params] ) keyword[and] keyword[not] identifier[output] [ literal[string] ]:
identifier[output] [ literal[string] ]= identifier[td] . identifier[text]
keyword[elif] identifier[Parser] . identifier[is_quality] ( identifier[params] ) keyword[and] keyword[not] identifier[output] [ literal[string] ]:
identifier[output] [ literal[string] ]= identifier[td] . identifier[text]
keyword[elif] identifier[Parser] . identifier[is_language] ( identifier[params] ) keyword[and] keyword[not] identifier[output] [ literal[string] ]:
identifier[output] [ literal[string] ]= identifier[td] . identifier[text]
keyword[return] identifier[output] | def parse_torrent_properties(table_datas):
"""
Static method that parses a given list of table data elements and using helper methods
`Parser.is_subcategory`, `Parser.is_quality`, `Parser.is_language`, collects torrent properties.
:param list lxml.HtmlElement table_datas: table_datas to parse
:return: identified category, subcategory, quality and languages.
:rtype: dict
"""
output = {'category': table_datas[0].text, 'subcategory': None, 'quality': None, 'language': None}
for i in range(1, len(table_datas)):
td = table_datas[i]
url = td.get('href')
params = Parser.get_params(url)
if Parser.is_subcategory(params) and (not output['subcategory']):
output['subcategory'] = td.text # depends on [control=['if'], data=[]]
elif Parser.is_quality(params) and (not output['quality']):
output['quality'] = td.text # depends on [control=['if'], data=[]]
elif Parser.is_language(params) and (not output['language']):
output['language'] = td.text # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return output |
def diam_floc_vel_term(ConcAl, ConcClay, coag, material,
                       DIM_FRACTAL, VelTerm, Temp):
    """Calculate floc diameter as a function of terminal velocity.

    Inverts a fractal-floc terminal-velocity relation: the bracketed term
    (18 * VelTerm * PHI_FLOC * nu / (g * d0**2)) * (rho_water /
    (rho_floc0 - rho_water)) is raised to 1/(DIM_FRACTAL - 1) and scaled
    by the primary particle diameter ``material.Diameter``.
    """
    # Bare float density (``.magnitude`` strips the pint units).
    WaterDensity = pc.density_water(Temp).magnitude
    return (material.Diameter * (((18 * VelTerm * PHI_FLOC
                                   * pc.viscosity_kinematic(Temp).magnitude
                                   )
                                  / (pc.gravity.magnitude * material.Diameter**2)
                                  )
                                 * (WaterDensity
                                    # Initial floc density minus water density
                                    # gives the buoyant density difference.
                                    / (dens_floc_init(ConcAl, ConcClay, coag,
                                                      material).magnitude
                                       - WaterDensity
                                       )
                                    )
                                 ) ** (1 / (DIM_FRACTAL - 1))
            ) | def function[diam_floc_vel_term, parameter[ConcAl, ConcClay, coag, material, DIM_FRACTAL, VelTerm, Temp]]:
constant[Calculate floc diamter as a function of terminal velocity.]
variable[WaterDensity] assign[=] call[name[pc].density_water, parameter[name[Temp]]].magnitude
return[binary_operation[name[material].Diameter * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[18] * name[VelTerm]] * name[PHI_FLOC]] * call[name[pc].viscosity_kinematic, parameter[name[Temp]]].magnitude] / binary_operation[name[pc].gravity.magnitude * binary_operation[name[material].Diameter ** constant[2]]]] * binary_operation[name[WaterDensity] / binary_operation[call[name[dens_floc_init], parameter[name[ConcAl], name[ConcClay], name[coag], name[material]]].magnitude - name[WaterDensity]]]] ** binary_operation[constant[1] / binary_operation[name[DIM_FRACTAL] - constant[1]]]]]] | keyword[def] identifier[diam_floc_vel_term] ( identifier[ConcAl] , identifier[ConcClay] , identifier[coag] , identifier[material] ,
identifier[DIM_FRACTAL] , identifier[VelTerm] , identifier[Temp] ):
literal[string]
identifier[WaterDensity] = identifier[pc] . identifier[density_water] ( identifier[Temp] ). identifier[magnitude]
keyword[return] ( identifier[material] . identifier[Diameter] *((( literal[int] * identifier[VelTerm] * identifier[PHI_FLOC]
* identifier[pc] . identifier[viscosity_kinematic] ( identifier[Temp] ). identifier[magnitude]
)
/( identifier[pc] . identifier[gravity] . identifier[magnitude] * identifier[material] . identifier[Diameter] ** literal[int] )
)
*( identifier[WaterDensity]
/( identifier[dens_floc_init] ( identifier[ConcAl] , identifier[ConcClay] , identifier[coag] ,
identifier[material] ). identifier[magnitude]
- identifier[WaterDensity]
)
)
)**( literal[int] /( identifier[DIM_FRACTAL] - literal[int] ))
) | def diam_floc_vel_term(ConcAl, ConcClay, coag, material, DIM_FRACTAL, VelTerm, Temp):
"""Calculate floc diamter as a function of terminal velocity."""
WaterDensity = pc.density_water(Temp).magnitude
return material.Diameter * (18 * VelTerm * PHI_FLOC * pc.viscosity_kinematic(Temp).magnitude / (pc.gravity.magnitude * material.Diameter ** 2) * (WaterDensity / (dens_floc_init(ConcAl, ConcClay, coag, material).magnitude - WaterDensity))) ** (1 / (DIM_FRACTAL - 1)) |
def _ReadEnumerationDataTypeDefinition(
    self, definitions_registry, definition_values, definition_name,
    is_member=False):
  """Reads an enumeration data type definition.

  Args:
    definitions_registry (DataTypeDefinitionsRegistry): data type definitions
        registry.
    definition_values (dict[str, object]): definition values.
    definition_name (str): name of the definition.
    is_member (Optional[bool]): True if the data type definition is a member
        data type definition.

  Returns:
    EnumerationDataTypeDefinition: enumeration data type definition.

  Raises:
    DefinitionReaderError: if the definitions values are missing or if
        the format is incorrect.
  """
  # Enumerations are only valid as top-level definitions, never as members.
  if is_member:
    error_message = 'data type not supported as member'
    raise errors.DefinitionReaderError(definition_name, error_message)
  values = definition_values.get('values')
  if not values:
    error_message = 'missing values'
    raise errors.DefinitionReaderError(definition_name, error_message)
  definition_object = self._ReadSemanticDataTypeDefinition(
      definitions_registry, definition_values,
      data_types.EnumerationDefinition, definition_name,
      self._SUPPORTED_DEFINITION_VALUES_ENUMERATION)
  # last_name is only used to locate a malformed entry in error messages.
  last_name = None
  for enumeration_value in values:
    aliases = enumeration_value.get('aliases', None)
    description = enumeration_value.get('description', None)
    name = enumeration_value.get('name', None)
    number = enumeration_value.get('number', None)
    # Both a name and a number are mandatory; number may legitimately be 0,
    # hence the explicit "is None" test.
    if not name or number is None:
      if last_name:
        error_location = 'after: {0:s}'.format(last_name)
      else:
        error_location = 'at start'
      error_message = '{0:s} missing name or number'.format(error_location)
      raise errors.DefinitionReaderError(definition_name, error_message)
    else:
      try:
        definition_object.AddValue(
            name, number, aliases=aliases, description=description)
      except KeyError as exception:
        # NOTE(review): KeyError presumably signals a duplicate value name
        # or number inside AddValue -- confirm against its implementation.
        error_message = '{0!s}'.format(exception)
        raise errors.DefinitionReaderError(definition_name, error_message)
    last_name = name
  return definition_object | def function[_ReadEnumerationDataTypeDefinition, parameter[self, definitions_registry, definition_values, definition_name, is_member]]:
constant[Reads an enumeration data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
definition_name (str): name of the definition.
is_member (Optional[bool]): True if the data type definition is a member
data type definition.
Returns:
EnumerationDataTypeDefinition: enumeration data type definition.
Raises:
DefinitionReaderError: if the definitions values are missing or if
the format is incorrect.
]
if name[is_member] begin[:]
variable[error_message] assign[=] constant[data type not supported as member]
<ast.Raise object at 0x7da1b0d6be80>
variable[values] assign[=] call[name[definition_values].get, parameter[constant[values]]]
if <ast.UnaryOp object at 0x7da1b0d68f40> begin[:]
variable[error_message] assign[=] constant[missing values]
<ast.Raise object at 0x7da1b0d6bf70>
variable[definition_object] assign[=] call[name[self]._ReadSemanticDataTypeDefinition, parameter[name[definitions_registry], name[definition_values], name[data_types].EnumerationDefinition, name[definition_name], name[self]._SUPPORTED_DEFINITION_VALUES_ENUMERATION]]
variable[last_name] assign[=] constant[None]
for taget[name[enumeration_value]] in starred[name[values]] begin[:]
variable[aliases] assign[=] call[name[enumeration_value].get, parameter[constant[aliases], constant[None]]]
variable[description] assign[=] call[name[enumeration_value].get, parameter[constant[description], constant[None]]]
variable[name] assign[=] call[name[enumeration_value].get, parameter[constant[name], constant[None]]]
variable[number] assign[=] call[name[enumeration_value].get, parameter[constant[number], constant[None]]]
if <ast.BoolOp object at 0x7da1b0d510f0> begin[:]
if name[last_name] begin[:]
variable[error_location] assign[=] call[constant[after: {0:s}].format, parameter[name[last_name]]]
variable[error_message] assign[=] call[constant[{0:s} missing name or number].format, parameter[name[error_location]]]
<ast.Raise object at 0x7da1b0d6ada0>
variable[last_name] assign[=] name[name]
return[name[definition_object]] | keyword[def] identifier[_ReadEnumerationDataTypeDefinition] (
identifier[self] , identifier[definitions_registry] , identifier[definition_values] , identifier[definition_name] ,
identifier[is_member] = keyword[False] ):
literal[string]
keyword[if] identifier[is_member] :
identifier[error_message] = literal[string]
keyword[raise] identifier[errors] . identifier[DefinitionReaderError] ( identifier[definition_name] , identifier[error_message] )
identifier[values] = identifier[definition_values] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[values] :
identifier[error_message] = literal[string]
keyword[raise] identifier[errors] . identifier[DefinitionReaderError] ( identifier[definition_name] , identifier[error_message] )
identifier[definition_object] = identifier[self] . identifier[_ReadSemanticDataTypeDefinition] (
identifier[definitions_registry] , identifier[definition_values] ,
identifier[data_types] . identifier[EnumerationDefinition] , identifier[definition_name] ,
identifier[self] . identifier[_SUPPORTED_DEFINITION_VALUES_ENUMERATION] )
identifier[last_name] = keyword[None]
keyword[for] identifier[enumeration_value] keyword[in] identifier[values] :
identifier[aliases] = identifier[enumeration_value] . identifier[get] ( literal[string] , keyword[None] )
identifier[description] = identifier[enumeration_value] . identifier[get] ( literal[string] , keyword[None] )
identifier[name] = identifier[enumeration_value] . identifier[get] ( literal[string] , keyword[None] )
identifier[number] = identifier[enumeration_value] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[name] keyword[or] identifier[number] keyword[is] keyword[None] :
keyword[if] identifier[last_name] :
identifier[error_location] = literal[string] . identifier[format] ( identifier[last_name] )
keyword[else] :
identifier[error_location] = literal[string]
identifier[error_message] = literal[string] . identifier[format] ( identifier[error_location] )
keyword[raise] identifier[errors] . identifier[DefinitionReaderError] ( identifier[definition_name] , identifier[error_message] )
keyword[else] :
keyword[try] :
identifier[definition_object] . identifier[AddValue] (
identifier[name] , identifier[number] , identifier[aliases] = identifier[aliases] , identifier[description] = identifier[description] )
keyword[except] identifier[KeyError] keyword[as] identifier[exception] :
identifier[error_message] = literal[string] . identifier[format] ( identifier[exception] )
keyword[raise] identifier[errors] . identifier[DefinitionReaderError] ( identifier[definition_name] , identifier[error_message] )
identifier[last_name] = identifier[name]
keyword[return] identifier[definition_object] | def _ReadEnumerationDataTypeDefinition(self, definitions_registry, definition_values, definition_name, is_member=False):
"""Reads an enumeration data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
definition_name (str): name of the definition.
is_member (Optional[bool]): True if the data type definition is a member
data type definition.
Returns:
EnumerationDataTypeDefinition: enumeration data type definition.
Raises:
DefinitionReaderError: if the definitions values are missing or if
the format is incorrect.
"""
if is_member:
error_message = 'data type not supported as member'
raise errors.DefinitionReaderError(definition_name, error_message) # depends on [control=['if'], data=[]]
values = definition_values.get('values')
if not values:
error_message = 'missing values'
raise errors.DefinitionReaderError(definition_name, error_message) # depends on [control=['if'], data=[]]
definition_object = self._ReadSemanticDataTypeDefinition(definitions_registry, definition_values, data_types.EnumerationDefinition, definition_name, self._SUPPORTED_DEFINITION_VALUES_ENUMERATION)
last_name = None
for enumeration_value in values:
aliases = enumeration_value.get('aliases', None)
description = enumeration_value.get('description', None)
name = enumeration_value.get('name', None)
number = enumeration_value.get('number', None)
if not name or number is None:
if last_name:
error_location = 'after: {0:s}'.format(last_name) # depends on [control=['if'], data=[]]
else:
error_location = 'at start'
error_message = '{0:s} missing name or number'.format(error_location)
raise errors.DefinitionReaderError(definition_name, error_message) # depends on [control=['if'], data=[]]
else:
try:
definition_object.AddValue(name, number, aliases=aliases, description=description) # depends on [control=['try'], data=[]]
except KeyError as exception:
error_message = '{0!s}'.format(exception)
raise errors.DefinitionReaderError(definition_name, error_message) # depends on [control=['except'], data=['exception']]
last_name = name # depends on [control=['for'], data=['enumeration_value']]
return definition_object |
def decode_json(cls, dct):
    ''' Custom JSON decoder for Events.
    Can be used as the ``object_hook`` argument of ``json.load`` or
    ``json.loads``.
    Args:
        dct (dict) : a JSON dictionary to decode
        The dictionary should have keys ``event_name`` and ``event_values``
    Raises:
        ValueError, if the event_name is unknown
    Examples:
        .. code-block:: python
            >>> import json
            >>> from bokeh.events import Event
            >>> data = '{"event_name": "pan", "event_values" : {"model_id": 1, "x": 10, "y": 20, "sx": 200, "sy": 37}}'
            >>> json.loads(data, object_hook=Event.decode_json)
            <bokeh.events.Pan object at 0x1040f84a8>
    '''
    # object_hook is called for every decoded dict; anything that doesn't
    # look like a serialized event is passed through unchanged.
    if not ('event_name' in dct and 'event_values' in dct):
        return dct
    event_name = dct['event_name']
    if event_name not in _CONCRETE_EVENT_CLASSES:
        raise ValueError("Could not find appropriate Event class for event_name: %r" % event_name)
    event_values = dct['event_values']
    # model_id is stored separately on the instance rather than passed to
    # the constructor (model=None); pop it before splatting the rest.
    model_id = event_values.pop('model_id')
    event = _CONCRETE_EVENT_CLASSES[event_name](model=None, **event_values)
    event._model_id = model_id
    return event | def function[decode_json, parameter[cls, dct]]:
constant[ Custom JSON decoder for Events.
Can be used as the ``object_hook`` argument of ``json.load`` or
``json.loads``.
Args:
dct (dict) : a JSON dictionary to decode
The dictionary should have keys ``event_name`` and ``event_values``
Raises:
ValueError, if the event_name is unknown
Examples:
.. code-block:: python
>>> import json
>>> from bokeh.events import Event
>>> data = '{"event_name": "pan", "event_values" : {"model_id": 1, "x": 10, "y": 20, "sx": 200, "sy": 37}}'
>>> json.loads(data, object_hook=Event.decode_json)
<bokeh.events.Pan object at 0x1040f84a8>
]
if <ast.UnaryOp object at 0x7da20e955540> begin[:]
return[name[dct]]
variable[event_name] assign[=] call[name[dct]][constant[event_name]]
if compare[name[event_name] <ast.NotIn object at 0x7da2590d7190> name[_CONCRETE_EVENT_CLASSES]] begin[:]
<ast.Raise object at 0x7da2044c0af0>
variable[event_values] assign[=] call[name[dct]][constant[event_values]]
variable[model_id] assign[=] call[name[event_values].pop, parameter[constant[model_id]]]
variable[event] assign[=] call[call[name[_CONCRETE_EVENT_CLASSES]][name[event_name]], parameter[]]
name[event]._model_id assign[=] name[model_id]
return[name[event]] | keyword[def] identifier[decode_json] ( identifier[cls] , identifier[dct] ):
literal[string]
keyword[if] keyword[not] ( literal[string] keyword[in] identifier[dct] keyword[and] literal[string] keyword[in] identifier[dct] ):
keyword[return] identifier[dct]
identifier[event_name] = identifier[dct] [ literal[string] ]
keyword[if] identifier[event_name] keyword[not] keyword[in] identifier[_CONCRETE_EVENT_CLASSES] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[event_name] )
identifier[event_values] = identifier[dct] [ literal[string] ]
identifier[model_id] = identifier[event_values] . identifier[pop] ( literal[string] )
identifier[event] = identifier[_CONCRETE_EVENT_CLASSES] [ identifier[event_name] ]( identifier[model] = keyword[None] ,** identifier[event_values] )
identifier[event] . identifier[_model_id] = identifier[model_id]
keyword[return] identifier[event] | def decode_json(cls, dct):
""" Custom JSON decoder for Events.
Can be used as the ``object_hook`` argument of ``json.load`` or
``json.loads``.
Args:
dct (dict) : a JSON dictionary to decode
The dictionary should have keys ``event_name`` and ``event_values``
Raises:
ValueError, if the event_name is unknown
Examples:
.. code-block:: python
>>> import json
>>> from bokeh.events import Event
>>> data = '{"event_name": "pan", "event_values" : {"model_id": 1, "x": 10, "y": 20, "sx": 200, "sy": 37}}'
>>> json.loads(data, object_hook=Event.decode_json)
<bokeh.events.Pan object at 0x1040f84a8>
"""
if not ('event_name' in dct and 'event_values' in dct):
return dct # depends on [control=['if'], data=[]]
event_name = dct['event_name']
if event_name not in _CONCRETE_EVENT_CLASSES:
raise ValueError('Could not find appropriate Event class for event_name: %r' % event_name) # depends on [control=['if'], data=['event_name']]
event_values = dct['event_values']
model_id = event_values.pop('model_id')
event = _CONCRETE_EVENT_CLASSES[event_name](model=None, **event_values)
event._model_id = model_id
return event |
def setup(self):
    """
    NSCF calculations should use the same FFT mesh as the one employed in the GS task
    (in principle, it's possible to interpolate inside Abinit but tests revealed some numerical noise
    Here we change the input file of the NSCF task to have the same FFT mesh.
    """
    # Locate the dependency that produces the DEN file; the for/else raises
    # only when the loop completes without a break (no DEN dependency found).
    for dep in self.deps:
        if "DEN" in dep.exts:
            parent_task = dep.node
            break
    else:
        raise RuntimeError("Cannot find parent node producing DEN file")
    # Read the three FFT grid dimensions from the parent task's GSR file.
    with parent_task.open_gsr() as gsr:
        den_mesh = 3 * [None]
        den_mesh[0] = gsr.reader.read_dimvalue("number_of_grid_points_vector1")
        den_mesh[1] = gsr.reader.read_dimvalue("number_of_grid_points_vector2")
        den_mesh[2] = gsr.reader.read_dimvalue("number_of_grid_points_vector3")
        # PAW runs set the (dense) double grid via ngfftdg; norm-conserving
        # runs set the standard ngfft mesh.
        if self.ispaw:
            self.set_vars(ngfftdg=den_mesh)
        else:
            self.set_vars(ngfft=den_mesh)
    super().setup() | def function[setup, parameter[self]]:
constant[
NSCF calculations should use the same FFT mesh as the one employed in the GS task
(in principle, it's possible to interpolate inside Abinit but tests revealed some numerical noise
Here we change the input file of the NSCF task to have the same FFT mesh.
]
for taget[name[dep]] in starred[name[self].deps] begin[:]
if compare[constant[DEN] in name[dep].exts] begin[:]
variable[parent_task] assign[=] name[dep].node
break
with call[name[parent_task].open_gsr, parameter[]] begin[:]
variable[den_mesh] assign[=] binary_operation[constant[3] * list[[<ast.Constant object at 0x7da20c6a8130>]]]
call[name[den_mesh]][constant[0]] assign[=] call[name[gsr].reader.read_dimvalue, parameter[constant[number_of_grid_points_vector1]]]
call[name[den_mesh]][constant[1]] assign[=] call[name[gsr].reader.read_dimvalue, parameter[constant[number_of_grid_points_vector2]]]
call[name[den_mesh]][constant[2]] assign[=] call[name[gsr].reader.read_dimvalue, parameter[constant[number_of_grid_points_vector3]]]
if name[self].ispaw begin[:]
call[name[self].set_vars, parameter[]]
call[call[name[super], parameter[]].setup, parameter[]] | keyword[def] identifier[setup] ( identifier[self] ):
literal[string]
keyword[for] identifier[dep] keyword[in] identifier[self] . identifier[deps] :
keyword[if] literal[string] keyword[in] identifier[dep] . identifier[exts] :
identifier[parent_task] = identifier[dep] . identifier[node]
keyword[break]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[with] identifier[parent_task] . identifier[open_gsr] () keyword[as] identifier[gsr] :
identifier[den_mesh] = literal[int] *[ keyword[None] ]
identifier[den_mesh] [ literal[int] ]= identifier[gsr] . identifier[reader] . identifier[read_dimvalue] ( literal[string] )
identifier[den_mesh] [ literal[int] ]= identifier[gsr] . identifier[reader] . identifier[read_dimvalue] ( literal[string] )
identifier[den_mesh] [ literal[int] ]= identifier[gsr] . identifier[reader] . identifier[read_dimvalue] ( literal[string] )
keyword[if] identifier[self] . identifier[ispaw] :
identifier[self] . identifier[set_vars] ( identifier[ngfftdg] = identifier[den_mesh] )
keyword[else] :
identifier[self] . identifier[set_vars] ( identifier[ngfft] = identifier[den_mesh] )
identifier[super] (). identifier[setup] () | def setup(self):
"""
NSCF calculations should use the same FFT mesh as the one employed in the GS task
(in principle, it's possible to interpolate inside Abinit but tests revealed some numerical noise
Here we change the input file of the NSCF task to have the same FFT mesh.
"""
for dep in self.deps:
if 'DEN' in dep.exts:
parent_task = dep.node
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dep']]
else:
raise RuntimeError('Cannot find parent node producing DEN file')
with parent_task.open_gsr() as gsr:
den_mesh = 3 * [None]
den_mesh[0] = gsr.reader.read_dimvalue('number_of_grid_points_vector1')
den_mesh[1] = gsr.reader.read_dimvalue('number_of_grid_points_vector2')
den_mesh[2] = gsr.reader.read_dimvalue('number_of_grid_points_vector3')
if self.ispaw:
self.set_vars(ngfftdg=den_mesh) # depends on [control=['if'], data=[]]
else:
self.set_vars(ngfft=den_mesh) # depends on [control=['with'], data=['gsr']]
super().setup() |
def changeMenu(self,menu):
    """
    Switch the active menu to ``menu``.

    ``menu`` must be the name of a currently registered menu. Switching
    fires the exit hooks on the previously active menu (if any), then the
    enter hooks on the new one, and finally sends a
    ``peng3d:window.menu.change`` event.

    Raises :py:exc:`ValueError` if no menu of that name is registered.
    """
    if menu not in self.menus:
        raise ValueError("Menu %s does not exist!"%menu)
    if menu == self.activeMenu:
        # Re-activating the current menu is a no-op; this guards against
        # bugs in menu initializers that trigger double activation.
        return
    previous = self.activeMenu
    self.activeMenu = menu
    if previous is not None:
        exiting = self.menus[previous]
        exiting.on_exit(menu)
        exiting.doAction("exit")
    self.menu.on_enter(previous)
    self.menu.doAction("enter")
    self.peng.sendEvent(
        "peng3d:window.menu.change",
        {"peng": self.peng, "window": self, "old": previous, "menu": menu},
    )
constant[
Changes to the given menu.
``menu`` must be a valid menu name that is currently known.
.. versionchanged:: 1.2a1
The push/pop handlers have been deprecated in favor of the new :py:meth:`Menu.on_enter() <peng3d.menu.Menu.on_enter>`\ , :py:meth:`Menu.on_exit() <peng3d.menu.Menu.on_exit>`\ , etc. events.
]
if compare[name[menu] <ast.NotIn object at 0x7da2590d7190> name[self].menus] begin[:]
<ast.Raise object at 0x7da1b016c520>
variable[old] assign[=] name[self].activeMenu
name[self].activeMenu assign[=] name[menu]
if compare[name[old] is_not constant[None]] begin[:]
call[call[name[self].menus][name[old]].on_exit, parameter[name[menu]]]
call[call[name[self].menus][name[old]].doAction, parameter[constant[exit]]]
call[name[self].menu.on_enter, parameter[name[old]]]
call[name[self].menu.doAction, parameter[constant[enter]]]
call[name[self].peng.sendEvent, parameter[constant[peng3d:window.menu.change], dictionary[[<ast.Constant object at 0x7da1b01daef0>, <ast.Constant object at 0x7da1b01dafb0>, <ast.Constant object at 0x7da1b01d83d0>, <ast.Constant object at 0x7da1b01d92d0>], [<ast.Attribute object at 0x7da1b01da920>, <ast.Name object at 0x7da1b01d8100>, <ast.Name object at 0x7da1b01d9120>, <ast.Name object at 0x7da1b01d8640>]]]] | keyword[def] identifier[changeMenu] ( identifier[self] , identifier[menu] ):
literal[string]
keyword[if] identifier[menu] keyword[not] keyword[in] identifier[self] . identifier[menus] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[menu] )
keyword[elif] identifier[menu] == identifier[self] . identifier[activeMenu] :
keyword[return]
identifier[old] = identifier[self] . identifier[activeMenu]
identifier[self] . identifier[activeMenu] = identifier[menu]
keyword[if] identifier[old] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[menus] [ identifier[old] ]. identifier[on_exit] ( identifier[menu] )
identifier[self] . identifier[menus] [ identifier[old] ]. identifier[doAction] ( literal[string] )
identifier[self] . identifier[menu] . identifier[on_enter] ( identifier[old] )
identifier[self] . identifier[menu] . identifier[doAction] ( literal[string] )
identifier[self] . identifier[peng] . identifier[sendEvent] ( literal[string] ,{ literal[string] : identifier[self] . identifier[peng] , literal[string] : identifier[self] , literal[string] : identifier[old] , literal[string] : identifier[menu] }) | def changeMenu(self, menu):
"""
Changes to the given menu.
``menu`` must be a valid menu name that is currently known.
.. versionchanged:: 1.2a1
The push/pop handlers have been deprecated in favor of the new :py:meth:`Menu.on_enter() <peng3d.menu.Menu.on_enter>`\\ , :py:meth:`Menu.on_exit() <peng3d.menu.Menu.on_exit>`\\ , etc. events.
"""
if menu not in self.menus:
raise ValueError('Menu %s does not exist!' % menu) # depends on [control=['if'], data=['menu']]
elif menu == self.activeMenu:
return # Ignore double menu activation to prevent bugs in menu initializer # depends on [control=['if'], data=[]]
old = self.activeMenu
self.activeMenu = menu
if old is not None:
self.menus[old].on_exit(menu)
self.menus[old].doAction('exit') # depends on [control=['if'], data=['old']]
#self.pop_handlers()
self.menu.on_enter(old)
self.menu.doAction('enter')
self.peng.sendEvent('peng3d:window.menu.change', {'peng': self.peng, 'window': self, 'old': old, 'menu': menu}) |
def transmit_learner_data(username, channel_code, channel_pk):
    """
    Push learner data to a single linked integrated channel.

    Arguments:
        username (str): Username of the User whose credentials are used for
            the learner-data API requests.
        channel_code (str): Capitalized identifier for the integrated channel.
        channel_pk (str): Primary key identifying the integrated channel.
    """
    started_at = time.time()
    api_user = User.objects.get(username=username)
    channel_config = INTEGRATED_CHANNEL_CHOICES[channel_code].objects.get(pk=channel_pk)
    LOGGER.info('Processing learners for integrated channel using configuration: [%s]', channel_config)
    # The learner-data transmission code paths never raise uncaught
    # exceptions, so no broad try/except is needed around this call.
    channel_config.transmit_learner_data(api_user)
    elapsed = time.time() - started_at
    LOGGER.info(
        'Learner data transmission task for integrated channel configuration [%s] took [%s] seconds',
        channel_config,
        elapsed,
    )
constant[
Task to send learner data to each linked integrated channel.
Arguments:
username (str): The username of the User to be used for making API requests for learner data.
channel_code (str): Capitalized identifier for the integrated channel
channel_pk (str): Primary key for identifying integrated channel
]
variable[start] assign[=] call[name[time].time, parameter[]]
variable[api_user] assign[=] call[name[User].objects.get, parameter[]]
variable[integrated_channel] assign[=] call[call[name[INTEGRATED_CHANNEL_CHOICES]][name[channel_code]].objects.get, parameter[]]
call[name[LOGGER].info, parameter[constant[Processing learners for integrated channel using configuration: [%s]], name[integrated_channel]]]
call[name[integrated_channel].transmit_learner_data, parameter[name[api_user]]]
variable[duration] assign[=] binary_operation[call[name[time].time, parameter[]] - name[start]]
call[name[LOGGER].info, parameter[constant[Learner data transmission task for integrated channel configuration [%s] took [%s] seconds], name[integrated_channel], name[duration]]] | keyword[def] identifier[transmit_learner_data] ( identifier[username] , identifier[channel_code] , identifier[channel_pk] ):
literal[string]
identifier[start] = identifier[time] . identifier[time] ()
identifier[api_user] = identifier[User] . identifier[objects] . identifier[get] ( identifier[username] = identifier[username] )
identifier[integrated_channel] = identifier[INTEGRATED_CHANNEL_CHOICES] [ identifier[channel_code] ]. identifier[objects] . identifier[get] ( identifier[pk] = identifier[channel_pk] )
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[integrated_channel] )
identifier[integrated_channel] . identifier[transmit_learner_data] ( identifier[api_user] )
identifier[duration] = identifier[time] . identifier[time] ()- identifier[start]
identifier[LOGGER] . identifier[info] (
literal[string] ,
identifier[integrated_channel] ,
identifier[duration]
) | def transmit_learner_data(username, channel_code, channel_pk):
"""
Task to send learner data to each linked integrated channel.
Arguments:
username (str): The username of the User to be used for making API requests for learner data.
channel_code (str): Capitalized identifier for the integrated channel
channel_pk (str): Primary key for identifying integrated channel
"""
start = time.time()
api_user = User.objects.get(username=username)
integrated_channel = INTEGRATED_CHANNEL_CHOICES[channel_code].objects.get(pk=channel_pk)
LOGGER.info('Processing learners for integrated channel using configuration: [%s]', integrated_channel)
# Note: learner data transmission code paths don't raise any uncaught exception, so we don't need a broad
# try-except block here.
integrated_channel.transmit_learner_data(api_user)
duration = time.time() - start
LOGGER.info('Learner data transmission task for integrated channel configuration [%s] took [%s] seconds', integrated_channel, duration) |
def battery(self):
    """
    Current system battery status as a :py:class:`Battery`, or ``None``
    when any of voltage, current or level has not been reported yet.
    """
    readings = (self._voltage, self._current, self._level)
    if any(reading is None for reading in readings):
        return None
    return Battery(*readings)
constant[
Current system batter status (:py:class:`Battery`).
]
if <ast.BoolOp object at 0x7da1b1caf790> begin[:]
return[constant[None]]
return[call[name[Battery], parameter[name[self]._voltage, name[self]._current, name[self]._level]]] | keyword[def] identifier[battery] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_voltage] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[_current] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[_level] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[Battery] ( identifier[self] . identifier[_voltage] , identifier[self] . identifier[_current] , identifier[self] . identifier[_level] ) | def battery(self):
"""
Current system batter status (:py:class:`Battery`).
"""
if self._voltage is None or self._current is None or self._level is None:
return None # depends on [control=['if'], data=[]]
return Battery(self._voltage, self._current, self._level) |
def eval(self, n, signed=False):
    """
    Evaluate this StridedInterval to obtain a list of concrete integers.

    :param n: Upper bound for the number of concrete integers
    :param signed: Treat this StridedInterval as signed or unsigned
    :return: A list of at most `n` concrete integers
    """
    if self.is_empty:
        # An empty interval holds no concrete values at all.
        return []
    if self._reversed:
        # Normalize byte order first, then evaluate the normalized interval.
        return self._reverse().eval(n, signed=signed)
    values = []
    if self.stride == 0 and n > 0:
        # Zero stride: the interval collapses to a single concrete value.
        values.append(self.lower_bound)
    else:
        # Choose how to interpret the bit pattern.
        bounds = self._signed_bounds() if signed else self._unsigned_bounds()
        for lo, hi in bounds:
            current = lo
            while len(values) < n and current <= hi:
                values.append(current)
                current += self.stride  # cannot overflow here
    return values
constant[
Evaluate this StridedInterval to obtain a list of concrete integers.
:param n: Upper bound for the number of concrete integers
:param signed: Treat this StridedInterval as signed or unsigned
:return: A list of at most `n` concrete integers
]
if name[self].is_empty begin[:]
return[list[[]]]
if name[self]._reversed begin[:]
return[call[call[name[self]._reverse, parameter[]].eval, parameter[name[n]]]]
variable[results] assign[=] list[[]]
if <ast.BoolOp object at 0x7da18eb56ec0> begin[:]
call[name[results].append, parameter[name[self].lower_bound]]
return[name[results]] | keyword[def] identifier[eval] ( identifier[self] , identifier[n] , identifier[signed] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[is_empty] :
keyword[return] []
keyword[if] identifier[self] . identifier[_reversed] :
keyword[return] identifier[self] . identifier[_reverse] (). identifier[eval] ( identifier[n] , identifier[signed] = identifier[signed] )
identifier[results] =[]
keyword[if] identifier[self] . identifier[stride] == literal[int] keyword[and] identifier[n] > literal[int] :
identifier[results] . identifier[append] ( identifier[self] . identifier[lower_bound] )
keyword[else] :
keyword[if] identifier[signed] :
identifier[bounds] = identifier[self] . identifier[_signed_bounds] ()
keyword[else] :
identifier[bounds] = identifier[self] . identifier[_unsigned_bounds] ()
keyword[for] identifier[lb] , identifier[ub] keyword[in] identifier[bounds] :
keyword[while] identifier[len] ( identifier[results] )< identifier[n] keyword[and] identifier[lb] <= identifier[ub] :
identifier[results] . identifier[append] ( identifier[lb] )
identifier[lb] += identifier[self] . identifier[stride]
keyword[return] identifier[results] | def eval(self, n, signed=False):
"""
Evaluate this StridedInterval to obtain a list of concrete integers.
:param n: Upper bound for the number of concrete integers
:param signed: Treat this StridedInterval as signed or unsigned
:return: A list of at most `n` concrete integers
"""
if self.is_empty:
# no value is available
return [] # depends on [control=['if'], data=[]]
if self._reversed:
return self._reverse().eval(n, signed=signed) # depends on [control=['if'], data=[]]
results = []
if self.stride == 0 and n > 0:
results.append(self.lower_bound) # depends on [control=['if'], data=[]]
else:
if signed:
# View it as a signed integer
bounds = self._signed_bounds() # depends on [control=['if'], data=[]]
else:
# View it as an unsigned integer
bounds = self._unsigned_bounds()
for (lb, ub) in bounds:
while len(results) < n and lb <= ub:
results.append(lb)
lb += self.stride # It will not overflow # depends on [control=['while'], data=[]] # depends on [control=['for'], data=[]]
return results |
def AddBookkeepingOperators(model):
    """Attach bookkeeping operators that can be inspected later.

    These operators do not influence the training procedure: they only
    gather statistics and write them to files or to logs.
    """
    # Print dumps a blob's content; to_file=1 redirects the printed output
    # to a file stored under root_folder/[blob name] instead of the logs.
    for blob_name in ('accuracy', 'loss'):
        model.Print(blob_name, [], to_file=1)
    # Summarize records statistics (mean, std, min, max) of each parameter
    # blob and of its gradient, unlike Print which dumps raw content.
    for param in model.params:
        model.Summarize(param, [], to_file=1)
        model.Summarize(model.param_to_grad[param], [], to_file=1)
constant[This adds a few bookkeeping operators that we can inspect later.
These operators do not affect the training procedure: they only collect
statistics and prints them to file or to logs.
]
call[name[model].Print, parameter[constant[accuracy], list[[]]]]
call[name[model].Print, parameter[constant[loss], list[[]]]]
for taget[name[param]] in starred[name[model].params] begin[:]
call[name[model].Summarize, parameter[name[param], list[[]]]]
call[name[model].Summarize, parameter[call[name[model].param_to_grad][name[param]], list[[]]]] | keyword[def] identifier[AddBookkeepingOperators] ( identifier[model] ):
literal[string]
identifier[model] . identifier[Print] ( literal[string] ,[], identifier[to_file] = literal[int] )
identifier[model] . identifier[Print] ( literal[string] ,[], identifier[to_file] = literal[int] )
keyword[for] identifier[param] keyword[in] identifier[model] . identifier[params] :
identifier[model] . identifier[Summarize] ( identifier[param] ,[], identifier[to_file] = literal[int] )
identifier[model] . identifier[Summarize] ( identifier[model] . identifier[param_to_grad] [ identifier[param] ],[], identifier[to_file] = literal[int] ) | def AddBookkeepingOperators(model):
"""This adds a few bookkeeping operators that we can inspect later.
These operators do not affect the training procedure: they only collect
statistics and prints them to file or to logs.
"""
# Print basically prints out the content of the blob. to_file=1 routes the
# printed output to a file. The file is going to be stored under
# root_folder/[blob name]
model.Print('accuracy', [], to_file=1)
model.Print('loss', [], to_file=1)
# Summarizes the parameters. Different from Print, Summarize gives some
# statistics of the parameter, such as mean, std, min and max.
for param in model.params:
model.Summarize(param, [], to_file=1)
model.Summarize(model.param_to_grad[param], [], to_file=1) # depends on [control=['for'], data=['param']] |
def _make_renderer(self, at_paths, at_encoding, **kwargs):
    """
    Build a pystache renderer from the stored renderer options.

    :param at_paths: Template search paths
    :param at_encoding: Template encoding
    :param kwargs: Keyword arguments passed to the template engine to
        render templates with specific features enabled.
    """
    # Encoding options: kwargs win, then stored options, then at_encoding.
    for encoding_opt in ("file_encoding", "string_encoding"):
        fallback = self._roptions.get(encoding_opt, at_encoding.lower())
        self._roptions[encoding_opt] = kwargs.get(encoding_opt, fallback)
    # Search paths: at_paths first, then kwargs-provided, then stored ones.
    paths_key = "search_dirs"
    search_paths = kwargs.get(paths_key, []) + self._roptions.get(paths_key, [])
    if at_paths is not None:
        search_paths = at_paths + search_paths
    self._roptions[paths_key] = search_paths
    return pystache.renderer.Renderer(**self._roptions)
constant[
:param at_paths: Template search paths
:param at_encoding: Template encoding
:param kwargs: Keyword arguments passed to the template engine to
render templates with specific features enabled.
]
for taget[name[eopt]] in starred[tuple[[<ast.Constant object at 0x7da204566050>, <ast.Constant object at 0x7da204567340>]]] begin[:]
variable[default] assign[=] call[name[self]._roptions.get, parameter[name[eopt], call[name[at_encoding].lower, parameter[]]]]
call[name[self]._roptions][name[eopt]] assign[=] call[name[kwargs].get, parameter[name[eopt], name[default]]]
variable[pkey] assign[=] constant[search_dirs]
variable[paths] assign[=] binary_operation[call[name[kwargs].get, parameter[name[pkey], list[[]]]] + call[name[self]._roptions.get, parameter[name[pkey], list[[]]]]]
if compare[name[at_paths] is_not constant[None]] begin[:]
variable[paths] assign[=] binary_operation[name[at_paths] + name[paths]]
call[name[self]._roptions][name[pkey]] assign[=] name[paths]
return[call[name[pystache].renderer.Renderer, parameter[]]] | keyword[def] identifier[_make_renderer] ( identifier[self] , identifier[at_paths] , identifier[at_encoding] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[eopt] keyword[in] ( literal[string] , literal[string] ):
identifier[default] = identifier[self] . identifier[_roptions] . identifier[get] ( identifier[eopt] , identifier[at_encoding] . identifier[lower] ())
identifier[self] . identifier[_roptions] [ identifier[eopt] ]= identifier[kwargs] . identifier[get] ( identifier[eopt] , identifier[default] )
identifier[pkey] = literal[string]
identifier[paths] = identifier[kwargs] . identifier[get] ( identifier[pkey] ,[])+ identifier[self] . identifier[_roptions] . identifier[get] ( identifier[pkey] ,[])
keyword[if] identifier[at_paths] keyword[is] keyword[not] keyword[None] :
identifier[paths] = identifier[at_paths] + identifier[paths]
identifier[self] . identifier[_roptions] [ identifier[pkey] ]= identifier[paths]
keyword[return] identifier[pystache] . identifier[renderer] . identifier[Renderer] (** identifier[self] . identifier[_roptions] ) | def _make_renderer(self, at_paths, at_encoding, **kwargs):
"""
:param at_paths: Template search paths
:param at_encoding: Template encoding
:param kwargs: Keyword arguments passed to the template engine to
render templates with specific features enabled.
"""
for eopt in ('file_encoding', 'string_encoding'):
default = self._roptions.get(eopt, at_encoding.lower())
self._roptions[eopt] = kwargs.get(eopt, default) # depends on [control=['for'], data=['eopt']]
pkey = 'search_dirs'
paths = kwargs.get(pkey, []) + self._roptions.get(pkey, [])
if at_paths is not None:
paths = at_paths + paths # depends on [control=['if'], data=['at_paths']]
self._roptions[pkey] = paths
return pystache.renderer.Renderer(**self._roptions) |
def check_global_valid(self):
    """
    Check that the template contains no SAM (serverless) resources, which
    cannot be used in a CloudFormation StackSet.

    :return: bool -- True when no resource is serverless, False otherwise.
    """
    # any() short-circuits on the first serverless resource and avoids
    # materializing an intermediate list just to count its length.
    return not any(
        resource._serverless_type for resource in self._data['resources']
    )
constant[
Makes sure there aren't any SAM resources in a template that will be used in a CloudFormation StackSet
:return: bool
]
variable[serverless_cnt] assign[=] call[name[len], parameter[call[name[list], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b1864fa0>, call[name[self]._data][constant[resources]]]]]]]]
if compare[name[serverless_cnt] greater[>] constant[0]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[check_global_valid] ( identifier[self] ):
literal[string]
identifier[serverless_cnt] = identifier[len] ( identifier[list] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] . identifier[_serverless_type] , identifier[self] . identifier[_data] [ literal[string] ])))
keyword[if] identifier[serverless_cnt] > literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def check_global_valid(self):
"""
Makes sure there aren't any SAM resources in a template that will be used in a CloudFormation StackSet
:return: bool
"""
serverless_cnt = len(list(filter(lambda x: x._serverless_type, self._data['resources'])))
if serverless_cnt > 0:
return False # depends on [control=['if'], data=[]]
return True |
def value(self):
    """
    Access the value.

    When the stored value is a dimension name (a string), look up that
    dimension on the call object and return its value; otherwise defer
    to the parent implementation.
    """
    if not isinstance(self._value, str):
        return super(CallDimensionI, self)._get_value()
    return getattr(self.call, self._value).value
constant[
access the value
]
if call[name[isinstance], parameter[name[self]._value, name[str]]] begin[:]
variable[dimension] assign[=] name[self]._value
return[call[name[getattr], parameter[name[self].call, name[dimension]]].value]
return[call[call[name[super], parameter[name[CallDimensionI], name[self]]]._get_value, parameter[]]] | keyword[def] identifier[value] ( identifier[self] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_value] , identifier[str] ):
identifier[dimension] = identifier[self] . identifier[_value]
keyword[return] identifier[getattr] ( identifier[self] . identifier[call] , identifier[dimension] ). identifier[value]
keyword[return] identifier[super] ( identifier[CallDimensionI] , identifier[self] ). identifier[_get_value] () | def value(self):
"""
access the value
"""
if isinstance(self._value, str):
dimension = self._value
return getattr(self.call, dimension).value # depends on [control=['if'], data=[]]
return super(CallDimensionI, self)._get_value() |
def make_redirect_url(self, path_info, query_args=None, domain_part=None):
    """Creates a redirect URL.

    :internal:
    """
    query_suffix = ("?" + self.encode_query_args(query_args)) if query_args else ""
    script_part = self.script_name[:-1].lstrip("/")
    target_path = posixpath.join(script_part, path_info.lstrip("/"))
    scheme = self.url_scheme or "http"
    return str(
        "%s://%s/%s%s"
        % (scheme, self.get_host(domain_part), target_path, query_suffix)
    )
constant[Creates a redirect URL.
:internal:
]
variable[suffix] assign[=] constant[]
if name[query_args] begin[:]
variable[suffix] assign[=] binary_operation[constant[?] + call[name[self].encode_query_args, parameter[name[query_args]]]]
return[call[name[str], parameter[binary_operation[constant[%s://%s/%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BoolOp object at 0x7da18dc04c10>, <ast.Call object at 0x7da18dc07b80>, <ast.Call object at 0x7da18f58f610>, <ast.Name object at 0x7da18f58ec20>]]]]]] | keyword[def] identifier[make_redirect_url] ( identifier[self] , identifier[path_info] , identifier[query_args] = keyword[None] , identifier[domain_part] = keyword[None] ):
literal[string]
identifier[suffix] = literal[string]
keyword[if] identifier[query_args] :
identifier[suffix] = literal[string] + identifier[self] . identifier[encode_query_args] ( identifier[query_args] )
keyword[return] identifier[str] (
literal[string]
%(
identifier[self] . identifier[url_scheme] keyword[or] literal[string] ,
identifier[self] . identifier[get_host] ( identifier[domain_part] ),
identifier[posixpath] . identifier[join] (
identifier[self] . identifier[script_name] [:- literal[int] ]. identifier[lstrip] ( literal[string] ), identifier[path_info] . identifier[lstrip] ( literal[string] )
),
identifier[suffix] ,
)
) | def make_redirect_url(self, path_info, query_args=None, domain_part=None):
"""Creates a redirect URL.
:internal:
"""
suffix = ''
if query_args:
suffix = '?' + self.encode_query_args(query_args) # depends on [control=['if'], data=[]]
return str('%s://%s/%s%s' % (self.url_scheme or 'http', self.get_host(domain_part), posixpath.join(self.script_name[:-1].lstrip('/'), path_info.lstrip('/')), suffix)) |
def heading(self):
    """
    The table heading, loaded lazily.

    If the table is not declared, attempt to declare it and return the
    heading; loading from the database requires a live connection.

    :return: table heading
    """
    if self._heading is None:
        # Create the instance-level heading on first access.
        self._heading = Heading()
    if not self._heading:
        # Heading exists but is empty -- load it lazily from the database.
        if self.connection is None:
            raise DataJointError(
                'DataJoint class is missing a database connection. '
                'Missing schema decorator on the class? (e.g. @schema)')
        self._heading.init_from_database(self.connection, self.database, self.table_name)
    return self._heading
constant[
Returns the table heading. If the table is not declared, attempts to declare it and return heading.
:return: table heading
]
if compare[name[self]._heading is constant[None]] begin[:]
name[self]._heading assign[=] call[name[Heading], parameter[]]
if <ast.UnaryOp object at 0x7da18f09d060> begin[:]
if compare[name[self].connection is constant[None]] begin[:]
<ast.Raise object at 0x7da18f09c250>
return[name[self]._heading] | keyword[def] identifier[heading] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_heading] keyword[is] keyword[None] :
identifier[self] . identifier[_heading] = identifier[Heading] ()
keyword[if] keyword[not] identifier[self] . identifier[_heading] :
keyword[if] identifier[self] . identifier[connection] keyword[is] keyword[None] :
keyword[raise] identifier[DataJointError] (
literal[string]
literal[string] )
keyword[else] :
identifier[self] . identifier[_heading] . identifier[init_from_database] ( identifier[self] . identifier[connection] , identifier[self] . identifier[database] , identifier[self] . identifier[table_name] )
keyword[return] identifier[self] . identifier[_heading] | def heading(self):
"""
Returns the table heading. If the table is not declared, attempts to declare it and return heading.
:return: table heading
"""
if self._heading is None:
self._heading = Heading() # instance-level heading # depends on [control=['if'], data=[]]
if not self._heading: # lazy loading of heading
if self.connection is None:
raise DataJointError('DataJoint class is missing a database connection. Missing schema decorator on the class? (e.g. @schema)') # depends on [control=['if'], data=[]]
else:
self._heading.init_from_database(self.connection, self.database, self.table_name) # depends on [control=['if'], data=[]]
return self._heading |
def delete_organization(self, organization_id):
    """
    Delete the organization identified by ``organization_id``.

    :param organization_id: ID of the organization to remove
    :return: response of the DELETE request
    """
    log.warning('Deleting organization...')
    endpoint = 'rest/servicedeskapi/organization/{}'.format(organization_id)
    return self.delete(endpoint, headers=self.experimental_headers)
constant[
Deletes an organization for a given organization ID
:param organization_id:
:return:
]
call[name[log].warning, parameter[constant[Deleting organization...]]]
variable[url] assign[=] call[constant[rest/servicedeskapi/organization/{}].format, parameter[name[organization_id]]]
return[call[name[self].delete, parameter[name[url]]]] | keyword[def] identifier[delete_organization] ( identifier[self] , identifier[organization_id] ):
literal[string]
identifier[log] . identifier[warning] ( literal[string] )
identifier[url] = literal[string] . identifier[format] ( identifier[organization_id] )
keyword[return] identifier[self] . identifier[delete] ( identifier[url] , identifier[headers] = identifier[self] . identifier[experimental_headers] ) | def delete_organization(self, organization_id):
"""
Deletes an organization for a given organization ID
:param organization_id:
:return:
"""
log.warning('Deleting organization...')
url = 'rest/servicedeskapi/organization/{}'.format(organization_id)
return self.delete(url, headers=self.experimental_headers) |
def guess_format(path):
    """Determine the file format of a folder or a file."""
    # Return the name of the first format whose verifier accepts the path.
    matching = (fmt for fmt in formats if fmt.verify(path))
    first = next(matching, None)
    if first is None:
        msg = "Undefined file format: '{}'".format(path)
        raise UnknownFileFormatError(msg)
    return first.__name__
constant[Determine the file format of a folder or a file]
for taget[name[fmt]] in starred[name[formats]] begin[:]
if call[name[fmt].verify, parameter[name[path]]] begin[:]
return[name[fmt].__name__] | keyword[def] identifier[guess_format] ( identifier[path] ):
literal[string]
keyword[for] identifier[fmt] keyword[in] identifier[formats] :
keyword[if] identifier[fmt] . identifier[verify] ( identifier[path] ):
keyword[return] identifier[fmt] . identifier[__name__]
keyword[else] :
identifier[msg] = literal[string] . identifier[format] ( identifier[path] )
keyword[raise] identifier[UnknownFileFormatError] ( identifier[msg] ) | def guess_format(path):
"""Determine the file format of a folder or a file"""
for fmt in formats:
if fmt.verify(path):
return fmt.__name__ # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fmt']]
else:
msg = "Undefined file format: '{}'".format(path)
raise UnknownFileFormatError(msg) |
def render_linked_js(self, js_files: Iterable[str]) -> str:
    """Default method used to render the final js links for the
    rendered webpage.

    Override this method in a sub-classed controller to change the output.
    """
    seen = set()  # type: Set[str]
    ordered = []
    for raw_path in js_files:
        # Relative paths are resolved through the static-file handler.
        resolved = raw_path if is_absolute(raw_path) else self.static_url(raw_path)
        # Keep only the first occurrence of each path, preserving order.
        if resolved not in seen:
            seen.add(resolved)
            ordered.append(resolved)
    tags = [
        '<script src="'
        + escape.xhtml_escape(p)
        + '" type="text/javascript"></script>'
        for p in ordered
    ]
    return "".join(tags)
constant[Default method used to render the final js links for the
rendered webpage.
Override this method in a sub-classed controller to change the output.
]
variable[paths] assign[=] list[[]]
variable[unique_paths] assign[=] call[name[set], parameter[]]
for taget[name[path]] in starred[name[js_files]] begin[:]
if <ast.UnaryOp object at 0x7da1b1fc9a20> begin[:]
variable[path] assign[=] call[name[self].static_url, parameter[name[path]]]
if compare[name[path] <ast.NotIn object at 0x7da2590d7190> name[unique_paths]] begin[:]
call[name[paths].append, parameter[name[path]]]
call[name[unique_paths].add, parameter[name[path]]]
return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b1fcbd30>]]] | keyword[def] identifier[render_linked_js] ( identifier[self] , identifier[js_files] : identifier[Iterable] [ identifier[str] ])-> identifier[str] :
literal[string]
identifier[paths] =[]
identifier[unique_paths] = identifier[set] ()
keyword[for] identifier[path] keyword[in] identifier[js_files] :
keyword[if] keyword[not] identifier[is_absolute] ( identifier[path] ):
identifier[path] = identifier[self] . identifier[static_url] ( identifier[path] )
keyword[if] identifier[path] keyword[not] keyword[in] identifier[unique_paths] :
identifier[paths] . identifier[append] ( identifier[path] )
identifier[unique_paths] . identifier[add] ( identifier[path] )
keyword[return] literal[string] . identifier[join] (
literal[string]
+ identifier[escape] . identifier[xhtml_escape] ( identifier[p] )
+ literal[string]
keyword[for] identifier[p] keyword[in] identifier[paths]
) | def render_linked_js(self, js_files: Iterable[str]) -> str:
"""Default method used to render the final js links for the
rendered webpage.
Override this method in a sub-classed controller to change the output.
"""
paths = []
unique_paths = set() # type: Set[str]
for path in js_files:
if not is_absolute(path):
path = self.static_url(path) # depends on [control=['if'], data=[]]
if path not in unique_paths:
paths.append(path)
unique_paths.add(path) # depends on [control=['if'], data=['path', 'unique_paths']] # depends on [control=['for'], data=['path']]
return ''.join(('<script src="' + escape.xhtml_escape(p) + '" type="text/javascript"></script>' for p in paths)) |
def _should_catch_error(self, error, errors=()):
"""
Returns whether to catch the given error.
Args:
error (Exception): The error to consider.
errors (Tuple[Type[Exception], ...], optional): The exception types that should be
caught. Defaults to :class:`ElementNotFound` plus any driver-specific invalid
element errors.
Returns:
bool: Whether to catch the given error.
"""
caught_errors = (
errors or
self.session.driver.invalid_element_errors + (ElementNotFound,))
return isinstance(error, caught_errors) | def function[_should_catch_error, parameter[self, error, errors]]:
constant[
Returns whether to catch the given error.
Args:
error (Exception): The error to consider.
errors (Tuple[Type[Exception], ...], optional): The exception types that should be
caught. Defaults to :class:`ElementNotFound` plus any driver-specific invalid
element errors.
Returns:
bool: Whether to catch the given error.
]
variable[caught_errors] assign[=] <ast.BoolOp object at 0x7da1b025d5a0>
return[call[name[isinstance], parameter[name[error], name[caught_errors]]]] | keyword[def] identifier[_should_catch_error] ( identifier[self] , identifier[error] , identifier[errors] =()):
literal[string]
identifier[caught_errors] =(
identifier[errors] keyword[or]
identifier[self] . identifier[session] . identifier[driver] . identifier[invalid_element_errors] +( identifier[ElementNotFound] ,))
keyword[return] identifier[isinstance] ( identifier[error] , identifier[caught_errors] ) | def _should_catch_error(self, error, errors=()):
"""
Returns whether to catch the given error.
Args:
error (Exception): The error to consider.
errors (Tuple[Type[Exception], ...], optional): The exception types that should be
caught. Defaults to :class:`ElementNotFound` plus any driver-specific invalid
element errors.
Returns:
bool: Whether to catch the given error.
"""
caught_errors = errors or self.session.driver.invalid_element_errors + (ElementNotFound,)
return isinstance(error, caught_errors) |
def import_cfg(file_name, **kwargs):
    """ Imports curves and surfaces from files in libconfig format.

    .. note::

        Requires `libconf <https://pypi.org/project/libconf/>`_ package.

    Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.

    :param file_name: name of the input file
    :type file_name: str
    :return: a list of rational spline geometries
    :rtype: list
    :raises GeomdlException: an error occurred writing the file
    """
    # 'libconf' is an optional dependency; fail with a helpful message if absent
    try:
        import libconf
    except ImportError:
        raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf")

    # Parser callback handed to the generic dict importer
    def callback(data):
        return libconf.loads(data)

    # Keyword arguments
    delta = kwargs.get('delta', -1.0)
    use_template = kwargs.get('jinja2', False)

    # Read the file and delegate to the generic importer
    file_src = exch.read_file(file_name)
    return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template)
constant[ Imports curves and surfaces from files in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred writing the file
]
def function[callback, parameter[data]]:
return[call[name[libconf].loads, parameter[name[data]]]]
<ast.Try object at 0x7da1b17b5660>
variable[delta] assign[=] call[name[kwargs].get, parameter[constant[delta], <ast.UnaryOp object at 0x7da1b16ab520>]]
variable[use_template] assign[=] call[name[kwargs].get, parameter[constant[jinja2], constant[False]]]
variable[file_src] assign[=] call[name[exch].read_file, parameter[name[file_name]]]
return[call[name[exch].import_dict_str, parameter[]]] | keyword[def] identifier[import_cfg] ( identifier[file_name] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[callback] ( identifier[data] ):
keyword[return] identifier[libconf] . identifier[loads] ( identifier[data] )
keyword[try] :
keyword[import] identifier[libconf]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[exch] . identifier[GeomdlException] ( literal[string] )
identifier[delta] = identifier[kwargs] . identifier[get] ( literal[string] ,- literal[int] )
identifier[use_template] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[file_src] = identifier[exch] . identifier[read_file] ( identifier[file_name] )
keyword[return] identifier[exch] . identifier[import_dict_str] ( identifier[file_src] = identifier[file_src] , identifier[delta] = identifier[delta] , identifier[callback] = identifier[callback] , identifier[tmpl] = identifier[use_template] ) | def import_cfg(file_name, **kwargs):
""" Imports curves and surfaces from files in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return libconf.loads(data)
# Check if it is possible to import 'libconf'
try:
import libconf # depends on [control=['try'], data=[]]
except ImportError:
raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf") # depends on [control=['except'], data=[]]
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) |
def _format_line(self, side, flag, linenum, text):
"""Returns HTML markup of "from" / "to" text lines
side -- 0 or 1 indicating "from" or "to" text
flag -- indicates if difference on line
linenum -- line number (used for line number column)
text -- line text to be marked up
"""
try:
linenum = '%d' % linenum
id = ' id="%s%s"' % (self._prefix[side], linenum)
except TypeError:
# handle blank lines where linenum is '>' or ''
id = ''
# replace those things that would get confused with HTML symbols
text = (
text.replace("&", "&").replace(">",
">").replace("<", "<")
)
type_ = 'neutral'
if '\0+' in text:
type_ = 'add'
if '\0-' in text:
if type_ == 'add':
type_ = 'chg'
type_ = 'sub'
if '\0^' in text:
type_ = 'chg'
# make space non-breakable so they don't get compressed or line wrapped
text = text.replace(' ', ' ').rstrip()
return (
'<td class="diff_lno"%s>%s</td>'
'<td class="diff_line diff_line_%s">%s</td>' %
(id, linenum, type_, text)
) | def function[_format_line, parameter[self, side, flag, linenum, text]]:
constant[Returns HTML markup of "from" / "to" text lines
side -- 0 or 1 indicating "from" or "to" text
flag -- indicates if difference on line
linenum -- line number (used for line number column)
text -- line text to be marked up
]
<ast.Try object at 0x7da20e9b0bb0>
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[&], constant[&]]].replace, parameter[constant[>], constant[>]]].replace, parameter[constant[<], constant[<]]]
variable[type_] assign[=] constant[neutral]
if compare[constant[ +] in name[text]] begin[:]
variable[type_] assign[=] constant[add]
if compare[constant[ -] in name[text]] begin[:]
if compare[name[type_] equal[==] constant[add]] begin[:]
variable[type_] assign[=] constant[chg]
variable[type_] assign[=] constant[sub]
if compare[constant[ ^] in name[text]] begin[:]
variable[type_] assign[=] constant[chg]
variable[text] assign[=] call[call[name[text].replace, parameter[constant[ ], constant[ ]]].rstrip, parameter[]]
return[binary_operation[constant[<td class="diff_lno"%s>%s</td><td class="diff_line diff_line_%s">%s</td>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb562c0>, <ast.Name object at 0x7da18eb544c0>, <ast.Name object at 0x7da18eb567d0>, <ast.Name object at 0x7da18eb540a0>]]]] | keyword[def] identifier[_format_line] ( identifier[self] , identifier[side] , identifier[flag] , identifier[linenum] , identifier[text] ):
literal[string]
keyword[try] :
identifier[linenum] = literal[string] % identifier[linenum]
identifier[id] = literal[string] %( identifier[self] . identifier[_prefix] [ identifier[side] ], identifier[linenum] )
keyword[except] identifier[TypeError] :
identifier[id] = literal[string]
identifier[text] =(
identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] ,
literal[string] ). identifier[replace] ( literal[string] , literal[string] )
)
identifier[type_] = literal[string]
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[type_] = literal[string]
keyword[if] literal[string] keyword[in] identifier[text] :
keyword[if] identifier[type_] == literal[string] :
identifier[type_] = literal[string]
identifier[type_] = literal[string]
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[type_] = literal[string]
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[rstrip] ()
keyword[return] (
literal[string]
literal[string] %
( identifier[id] , identifier[linenum] , identifier[type_] , identifier[text] )
) | def _format_line(self, side, flag, linenum, text):
"""Returns HTML markup of "from" / "to" text lines
side -- 0 or 1 indicating "from" or "to" text
flag -- indicates if difference on line
linenum -- line number (used for line number column)
text -- line text to be marked up
"""
try:
linenum = '%d' % linenum
id = ' id="%s%s"' % (self._prefix[side], linenum) # depends on [control=['try'], data=[]]
except TypeError:
# handle blank lines where linenum is '>' or ''
id = '' # depends on [control=['except'], data=[]]
# replace those things that would get confused with HTML symbols
text = text.replace('&', '&').replace('>', '>').replace('<', '<')
type_ = 'neutral'
if '\x00+' in text:
type_ = 'add' # depends on [control=['if'], data=[]]
if '\x00-' in text:
if type_ == 'add':
type_ = 'chg' # depends on [control=['if'], data=['type_']]
type_ = 'sub' # depends on [control=['if'], data=[]]
if '\x00^' in text:
type_ = 'chg' # depends on [control=['if'], data=[]]
# make space non-breakable so they don't get compressed or line wrapped
text = text.replace(' ', ' ').rstrip()
return '<td class="diff_lno"%s>%s</td><td class="diff_line diff_line_%s">%s</td>' % (id, linenum, type_, text) |
def get_default_config(self):
    """
    Return the default collector settings.
    """
    base = super(LoadAverageCollector, self).get_default_config()
    # Overlay this collector's own defaults on the inherited ones.
    base['path'] = 'loadavg'
    base['simple'] = 'False'
    return base
constant[
Returns the default collector settings
]
variable[config] assign[=] call[call[name[super], parameter[name[LoadAverageCollector], name[self]]].get_default_config, parameter[]]
call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da2049610f0>, <ast.Constant object at 0x7da204963130>], [<ast.Constant object at 0x7da204963cd0>, <ast.Constant object at 0x7da204963d30>]]]]
return[name[config]] | keyword[def] identifier[get_default_config] ( identifier[self] ):
literal[string]
identifier[config] = identifier[super] ( identifier[LoadAverageCollector] , identifier[self] ). identifier[get_default_config] ()
identifier[config] . identifier[update] ({
literal[string] : literal[string] ,
literal[string] : literal[string]
})
keyword[return] identifier[config] | def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(LoadAverageCollector, self).get_default_config()
config.update({'path': 'loadavg', 'simple': 'False'})
return config |
def bandit(self, choice_rewards):
    """
    Multi-armed bandit method which chooses the arm for which the upper
    confidence bound (UCB) of expected reward is greatest.

    If there are multiple arms with the same UCB1 index, then one is chosen
    at random.

    An explanation is here:
    https://www.cs.bham.ac.uk/internal/courses/robotics/lectures/ucb1.pdf
    """
    # Total number of pulls across all arms, floored at 1 to avoid log(0).
    total_pulls = max(1, sum(len(rewards) for rewards in choice_rewards.values()))

    def upper_confidence_bound(arm):
        arm_rewards = choice_rewards[arm]
        arm_pulls = max(len(arm_rewards), 1)
        mean_reward = np.nanmean(arm_rewards) if len(arm_rewards) else 0
        # Exploration bonus shrinks as an arm accumulates pulls.
        exploration = np.sqrt(2.0 * np.log(total_pulls) / arm_pulls)
        return mean_reward + exploration

    # Shuffling first makes the argmax tie-break uniformly at random.
    return max(shuffle(choice_rewards), key=upper_confidence_bound)
constant[
Multi-armed bandit method which chooses the arm for which the upper
confidence bound (UCB) of expected reward is greatest.
If there are multiple arms with the same UCB1 index, then one is chosen
at random.
An explanation is here:
https://www.cs.bham.ac.uk/internal/courses/robotics/lectures/ucb1.pdf
]
variable[total_pulls] assign[=] call[name[max], parameter[constant[1], call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b08fb760>]]]]
def function[ucb1, parameter[choice]]:
variable[rewards] assign[=] call[name[choice_rewards]][name[choice]]
variable[choice_pulls] assign[=] call[name[max], parameter[call[name[len], parameter[name[rewards]]], constant[1]]]
variable[average_reward] assign[=] <ast.IfExp object at 0x7da1b08fb7f0>
variable[error] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[constant[2.0] * call[name[np].log, parameter[name[total_pulls]]]] / name[choice_pulls]]]]
return[binary_operation[name[average_reward] + name[error]]]
return[call[name[max], parameter[call[name[shuffle], parameter[name[choice_rewards]]]]]] | keyword[def] identifier[bandit] ( identifier[self] , identifier[choice_rewards] ):
literal[string]
identifier[total_pulls] = identifier[max] ( literal[int] , identifier[sum] ( identifier[len] ( identifier[r] ) keyword[for] identifier[r] keyword[in] identifier[choice_rewards] . identifier[values] ()))
keyword[def] identifier[ucb1] ( identifier[choice] ):
identifier[rewards] = identifier[choice_rewards] [ identifier[choice] ]
identifier[choice_pulls] = identifier[max] ( identifier[len] ( identifier[rewards] ), literal[int] )
identifier[average_reward] = identifier[np] . identifier[nanmean] ( identifier[rewards] ) keyword[if] identifier[len] ( identifier[rewards] ) keyword[else] literal[int]
identifier[error] = identifier[np] . identifier[sqrt] ( literal[int] * identifier[np] . identifier[log] ( identifier[total_pulls] )/ identifier[choice_pulls] )
keyword[return] identifier[average_reward] + identifier[error]
keyword[return] identifier[max] ( identifier[shuffle] ( identifier[choice_rewards] ), identifier[key] = identifier[ucb1] ) | def bandit(self, choice_rewards):
"""
Multi-armed bandit method which chooses the arm for which the upper
confidence bound (UCB) of expected reward is greatest.
If there are multiple arms with the same UCB1 index, then one is chosen
at random.
An explanation is here:
https://www.cs.bham.ac.uk/internal/courses/robotics/lectures/ucb1.pdf
"""
# count the larger of 1 and the total number of arm pulls
total_pulls = max(1, sum((len(r) for r in choice_rewards.values())))
def ucb1(choice):
rewards = choice_rewards[choice]
choice_pulls = max(len(rewards), 1)
average_reward = np.nanmean(rewards) if len(rewards) else 0
error = np.sqrt(2.0 * np.log(total_pulls) / choice_pulls)
return average_reward + error
return max(shuffle(choice_rewards), key=ucb1) |
def airspeed_autocal_encode(self, vx, vy, vz, diff_pressure, EAS2TAS, ratio, state_x, state_y, state_z, Pax, Pby, Pcz):
    '''
    Build an airspeed auto-calibration message.

    vx                        : GPS velocity north m/s (float)
    vy                        : GPS velocity east m/s (float)
    vz                        : GPS velocity down m/s (float)
    diff_pressure             : Differential pressure pascals (float)
    EAS2TAS                   : Estimated to true airspeed ratio (float)
    ratio                     : Airspeed ratio (float)
    state_x                   : EKF state x (float)
    state_y                   : EKF state y (float)
    state_z                   : EKF state z (float)
    Pax                       : EKF Pax (float)
    Pby                       : EKF Pby (float)
    Pcz                       : EKF Pcz (float)
    '''
    # Thin pass-through constructor wrapper; argument order matches the
    # MAVLink message definition.
    return MAVLink_airspeed_autocal_message(
        vx, vy, vz, diff_pressure, EAS2TAS, ratio,
        state_x, state_y, state_z, Pax, Pby, Pcz)
constant[
Airspeed auto-calibration
vx : GPS velocity north m/s (float)
vy : GPS velocity east m/s (float)
vz : GPS velocity down m/s (float)
diff_pressure : Differential pressure pascals (float)
EAS2TAS : Estimated to true airspeed ratio (float)
ratio : Airspeed ratio (float)
state_x : EKF state x (float)
state_y : EKF state y (float)
state_z : EKF state z (float)
Pax : EKF Pax (float)
Pby : EKF Pby (float)
Pcz : EKF Pcz (float)
]
return[call[name[MAVLink_airspeed_autocal_message], parameter[name[vx], name[vy], name[vz], name[diff_pressure], name[EAS2TAS], name[ratio], name[state_x], name[state_y], name[state_z], name[Pax], name[Pby], name[Pcz]]]] | keyword[def] identifier[airspeed_autocal_encode] ( identifier[self] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[diff_pressure] , identifier[EAS2TAS] , identifier[ratio] , identifier[state_x] , identifier[state_y] , identifier[state_z] , identifier[Pax] , identifier[Pby] , identifier[Pcz] ):
literal[string]
keyword[return] identifier[MAVLink_airspeed_autocal_message] ( identifier[vx] , identifier[vy] , identifier[vz] , identifier[diff_pressure] , identifier[EAS2TAS] , identifier[ratio] , identifier[state_x] , identifier[state_y] , identifier[state_z] , identifier[Pax] , identifier[Pby] , identifier[Pcz] ) | def airspeed_autocal_encode(self, vx, vy, vz, diff_pressure, EAS2TAS, ratio, state_x, state_y, state_z, Pax, Pby, Pcz):
"""
Airspeed auto-calibration
vx : GPS velocity north m/s (float)
vy : GPS velocity east m/s (float)
vz : GPS velocity down m/s (float)
diff_pressure : Differential pressure pascals (float)
EAS2TAS : Estimated to true airspeed ratio (float)
ratio : Airspeed ratio (float)
state_x : EKF state x (float)
state_y : EKF state y (float)
state_z : EKF state z (float)
Pax : EKF Pax (float)
Pby : EKF Pby (float)
Pcz : EKF Pcz (float)
"""
return MAVLink_airspeed_autocal_message(vx, vy, vz, diff_pressure, EAS2TAS, ratio, state_x, state_y, state_z, Pax, Pby, Pcz) |
def _aes_decrypt(data, algorithm, key):
    '''AES decrypt

    data      -- ciphertext; when no explicit IV is configured, the IV is
                 expected as the first ``iv_size`` bytes of ``data``
    algorithm -- dict with 'subtype' (only 'cbc' supported), 'iv_size' and
                 optionally a fixed 'iv'
    key       -- AES key bytes
    Returns the plaintext with the trailing padding stripped.
    '''
    if algorithm['subtype'] == 'cbc':
        mode = AES.MODE_CBC
    else:
        raise Exception('AES subtype not supported: %s'
                        % algorithm['subtype'])
    iv_size = algorithm['iv_size']
    if 'iv' in algorithm and algorithm['iv']:
        # Fixed IV supplied by configuration; the whole payload is ciphertext.
        if len(algorithm['iv']) != algorithm['iv_size']:
            raise Exception('Invalid IV size')
        iv_value = algorithm['iv']
        enc = data
    else:
        # IV is prepended to the ciphertext.
        iv_value = data[:iv_size]
        enc = data[iv_size:]
    dec = AES.new(key, mode, iv_value).decrypt(enc)
    # PKCS#7-style padding: the last byte holds the pad length. On Python 3,
    # indexing bytes yields an int already, so calling ord() on it raises
    # TypeError (the original only worked on Python 2 str).
    last = dec[-1]
    numpad = last if isinstance(last, int) else ord(last)
    dec = dec[0:-numpad]
    return dec
constant[AES decrypt]
if compare[call[name[algorithm]][constant[subtype]] equal[==] constant[cbc]] begin[:]
variable[mode] assign[=] name[AES].MODE_CBC
variable[iv_size] assign[=] call[name[algorithm]][constant[iv_size]]
if <ast.BoolOp object at 0x7da1afea8be0> begin[:]
if compare[call[name[len], parameter[call[name[algorithm]][constant[iv]]]] not_equal[!=] call[name[algorithm]][constant[iv_size]]] begin[:]
<ast.Raise object at 0x7da1afeabe20>
variable[iv_value] assign[=] call[name[algorithm]][constant[iv]]
variable[enc] assign[=] name[data]
variable[dec] assign[=] call[call[name[AES].new, parameter[name[key], name[mode], name[iv_value]]].decrypt, parameter[name[enc]]]
variable[numpad] assign[=] call[name[ord], parameter[call[name[dec]][<ast.UnaryOp object at 0x7da1afeabe80>]]]
variable[dec] assign[=] call[name[dec]][<ast.Slice object at 0x7da1afea9420>]
return[name[dec]] | keyword[def] identifier[_aes_decrypt] ( identifier[data] , identifier[algorithm] , identifier[key] ):
literal[string]
keyword[if] identifier[algorithm] [ literal[string] ]== literal[string] :
identifier[mode] = identifier[AES] . identifier[MODE_CBC]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string]
% identifier[algorithm] [ literal[string] ])
identifier[iv_size] = identifier[algorithm] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[algorithm] keyword[and] identifier[algorithm] [ literal[string] ]:
keyword[if] identifier[len] ( identifier[algorithm] [ literal[string] ])!= identifier[algorithm] [ literal[string] ]:
keyword[raise] identifier[Exception] ( literal[string] )
identifier[iv_value] = identifier[algorithm] [ literal[string] ]
identifier[enc] = identifier[data]
keyword[else] :
identifier[iv_value] = identifier[data] [: identifier[iv_size] ]
identifier[enc] = identifier[data] [ identifier[iv_size] :]
identifier[dec] = identifier[AES] . identifier[new] ( identifier[key] , identifier[mode] , identifier[iv_value] ). identifier[decrypt] ( identifier[enc] )
identifier[numpad] = identifier[ord] ( identifier[dec] [- literal[int] ])
identifier[dec] = identifier[dec] [ literal[int] :- identifier[numpad] ]
keyword[return] identifier[dec] | def _aes_decrypt(data, algorithm, key):
"""AES decrypt"""
if algorithm['subtype'] == 'cbc':
mode = AES.MODE_CBC # depends on [control=['if'], data=[]]
else:
raise Exception('AES subtype not supported: %s' % algorithm['subtype'])
iv_size = algorithm['iv_size']
if 'iv' in algorithm and algorithm['iv']:
if len(algorithm['iv']) != algorithm['iv_size']:
raise Exception('Invalid IV size') # depends on [control=['if'], data=[]]
iv_value = algorithm['iv']
enc = data # depends on [control=['if'], data=[]]
else:
iv_value = data[:iv_size]
enc = data[iv_size:]
dec = AES.new(key, mode, iv_value).decrypt(enc)
numpad = ord(dec[-1])
dec = dec[0:-numpad]
return dec |
def _color_variant(hex_color, brightness_offset=1):
"""Takes a color like #87c95f and produces a lighter or darker variant.
Code adapted from method proposed by Chase Seibert at:
http://chase-seibert.github.io/blog/2011/07/29/python-calculate-lighterdarker-rgb-colors.html.
Args:
hex_color (str): The original hex color.
brightness_offset (int): The amount to shift the color by.
Returns:
new_color (str): The new hex color variant.
Raises:
Exception: if the len of the hex_color isn't the appropriate length (7).
"""
if len(hex_color) != 7:
raise Exception("Passed %s into color_variant(), needs to be in #87c95f format." % hex_color)
rgb_hex = [hex_color[x:x+2] for x in [1, 3, 5]]
new_rgb_int = [int(hex_value, 16) + brightness_offset for hex_value in rgb_hex]
new_rgb_int = [min([255, max([0, i])]) for i in new_rgb_int] # make sure new values are between 0 and 255
# hex() produces "0x88", we want just "88"
new_color = "#"
for i in new_rgb_int:
if len(hex(i)[2:]) == 2:
new_color += hex(i)[2:]
else:
new_color += hex(i)[2:] + "0"
return new_color | def function[_color_variant, parameter[hex_color, brightness_offset]]:
constant[Takes a color like #87c95f and produces a lighter or darker variant.
Code adapted from method proposed by Chase Seibert at:
http://chase-seibert.github.io/blog/2011/07/29/python-calculate-lighterdarker-rgb-colors.html.
Args:
hex_color (str): The original hex color.
brightness_offset (int): The amount to shift the color by.
Returns:
new_color (str): The new hex color variant.
Raises:
Exception: if the len of the hex_color isn't the appropriate length (7).
]
if compare[call[name[len], parameter[name[hex_color]]] not_equal[!=] constant[7]] begin[:]
<ast.Raise object at 0x7da2049631f0>
variable[rgb_hex] assign[=] <ast.ListComp object at 0x7da204963dc0>
variable[new_rgb_int] assign[=] <ast.ListComp object at 0x7da2049634c0>
variable[new_rgb_int] assign[=] <ast.ListComp object at 0x7da2049614b0>
variable[new_color] assign[=] constant[#]
for taget[name[i]] in starred[name[new_rgb_int]] begin[:]
if compare[call[name[len], parameter[call[call[name[hex], parameter[name[i]]]][<ast.Slice object at 0x7da204960400>]]] equal[==] constant[2]] begin[:]
<ast.AugAssign object at 0x7da204963730>
return[name[new_color]] | keyword[def] identifier[_color_variant] ( identifier[hex_color] , identifier[brightness_offset] = literal[int] ):
literal[string]
keyword[if] identifier[len] ( identifier[hex_color] )!= literal[int] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[hex_color] )
identifier[rgb_hex] =[ identifier[hex_color] [ identifier[x] : identifier[x] + literal[int] ] keyword[for] identifier[x] keyword[in] [ literal[int] , literal[int] , literal[int] ]]
identifier[new_rgb_int] =[ identifier[int] ( identifier[hex_value] , literal[int] )+ identifier[brightness_offset] keyword[for] identifier[hex_value] keyword[in] identifier[rgb_hex] ]
identifier[new_rgb_int] =[ identifier[min] ([ literal[int] , identifier[max] ([ literal[int] , identifier[i] ])]) keyword[for] identifier[i] keyword[in] identifier[new_rgb_int] ]
identifier[new_color] = literal[string]
keyword[for] identifier[i] keyword[in] identifier[new_rgb_int] :
keyword[if] identifier[len] ( identifier[hex] ( identifier[i] )[ literal[int] :])== literal[int] :
identifier[new_color] += identifier[hex] ( identifier[i] )[ literal[int] :]
keyword[else] :
identifier[new_color] += identifier[hex] ( identifier[i] )[ literal[int] :]+ literal[string]
keyword[return] identifier[new_color] | def _color_variant(hex_color, brightness_offset=1):
"""Takes a color like #87c95f and produces a lighter or darker variant.
Code adapted from method proposed by Chase Seibert at:
http://chase-seibert.github.io/blog/2011/07/29/python-calculate-lighterdarker-rgb-colors.html.
Args:
hex_color (str): The original hex color.
brightness_offset (int): The amount to shift the color by.
Returns:
new_color (str): The new hex color variant.
Raises:
Exception: if the len of the hex_color isn't the appropriate length (7).
"""
if len(hex_color) != 7:
raise Exception('Passed %s into color_variant(), needs to be in #87c95f format.' % hex_color) # depends on [control=['if'], data=[]]
rgb_hex = [hex_color[x:x + 2] for x in [1, 3, 5]]
new_rgb_int = [int(hex_value, 16) + brightness_offset for hex_value in rgb_hex]
new_rgb_int = [min([255, max([0, i])]) for i in new_rgb_int] # make sure new values are between 0 and 255
# hex() produces "0x88", we want just "88"
new_color = '#'
for i in new_rgb_int:
if len(hex(i)[2:]) == 2:
new_color += hex(i)[2:] # depends on [control=['if'], data=[]]
else:
new_color += hex(i)[2:] + '0' # depends on [control=['for'], data=['i']]
return new_color |
def transform_to_2d(data, max_axis):
    """
    Projects 3d data cube along one axis using maximum intensity with
    preservation of the signs. Adapted from nilearn.

    data     -- 3D ndarray
    max_axis -- axis (0, 1 or 2) to project along
    Returns the rotated 2D maximum-absolute-intensity projection.
    """
    import numpy as np
    # get the shape of the array we are projecting to
    new_shape = list(data.shape)
    del new_shape[max_axis]

    # generate a 3D indexing array that points to max abs value in the
    # current projection
    a1, a2 = np.indices(new_shape)
    inds = [a1, a2]
    inds.insert(max_axis, np.abs(data).argmax(axis=max_axis))

    # take the values where the absolute value of the projection
    # is the highest. NumPy requires a tuple (not a list) of index arrays
    # for multi-dimensional fancy indexing; list indexing was deprecated
    # in 1.15 and later removed.
    maximum_intensity_data = data[tuple(inds)]

    return np.rot90(maximum_intensity_data)
constant[
Projects 3d data cube along one axis using maximum intensity with
preservation of the signs. Adapted from nilearn.
]
import module[numpy] as alias[np]
variable[new_shape] assign[=] call[name[list], parameter[name[data].shape]]
<ast.Delete object at 0x7da1b0608b80>
<ast.Tuple object at 0x7da1b0608b50> assign[=] call[name[np].indices, parameter[name[new_shape]]]
variable[inds] assign[=] list[[<ast.Name object at 0x7da1b060a1d0>, <ast.Name object at 0x7da1b060b3d0>]]
call[name[inds].insert, parameter[name[max_axis], call[call[name[np].abs, parameter[name[data]]].argmax, parameter[]]]]
variable[maximum_intensity_data] assign[=] call[name[data]][name[inds]]
return[call[name[np].rot90, parameter[name[maximum_intensity_data]]]] | keyword[def] identifier[transform_to_2d] ( identifier[data] , identifier[max_axis] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
identifier[new_shape] = identifier[list] ( identifier[data] . identifier[shape] )
keyword[del] identifier[new_shape] [ identifier[max_axis] ]
identifier[a1] , identifier[a2] = identifier[np] . identifier[indices] ( identifier[new_shape] )
identifier[inds] =[ identifier[a1] , identifier[a2] ]
identifier[inds] . identifier[insert] ( identifier[max_axis] , identifier[np] . identifier[abs] ( identifier[data] ). identifier[argmax] ( identifier[axis] = identifier[max_axis] ))
identifier[maximum_intensity_data] = identifier[data] [ identifier[inds] ]
keyword[return] identifier[np] . identifier[rot90] ( identifier[maximum_intensity_data] ) | def transform_to_2d(data, max_axis):
"""
Projects 3d data cube along one axis using maximum intensity with
preservation of the signs. Adapted from nilearn.
"""
import numpy as np
# get the shape of the array we are projecting to
new_shape = list(data.shape)
del new_shape[max_axis]
# generate a 3D indexing array that points to max abs value in the
# current projection
(a1, a2) = np.indices(new_shape)
inds = [a1, a2]
inds.insert(max_axis, np.abs(data).argmax(axis=max_axis))
# take the values where the absolute value of the projection
# is the highest
maximum_intensity_data = data[inds]
return np.rot90(maximum_intensity_data) |
def wait_for_port(self, port, timeout=10, **probe_kwargs):
    """
    block until specified port starts accepting connections, raises an exc ProbeTimeout
    if timeout is reached

    :param port: int, port number
    :param timeout: int or float (seconds), time to wait for establishing the connection
    :param probe_kwargs: arguments passed to Probe constructor
    :return: None
    """
    # Re-check is_port_open(port) until it succeeds or the timeout expires.
    check_port = functools.partial(self.is_port_open, port)
    probe = Probe(timeout=timeout, fnc=check_port, **probe_kwargs)
    probe.run()
constant[
block until specified port starts accepting connections, raises an exc ProbeTimeout
if timeout is reached
:param port: int, port number
:param timeout: int or float (seconds), time to wait for establishing the connection
:param probe_kwargs: arguments passed to Probe constructor
:return: None
]
call[call[name[Probe], parameter[]].run, parameter[]] | keyword[def] identifier[wait_for_port] ( identifier[self] , identifier[port] , identifier[timeout] = literal[int] ,** identifier[probe_kwargs] ):
literal[string]
identifier[Probe] ( identifier[timeout] = identifier[timeout] , identifier[fnc] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[is_port_open] , identifier[port] ),** identifier[probe_kwargs] ). identifier[run] () | def wait_for_port(self, port, timeout=10, **probe_kwargs):
"""
block until specified port starts accepting connections, raises an exc ProbeTimeout
if timeout is reached
:param port: int, port number
:param timeout: int or float (seconds), time to wait for establishing the connection
:param probe_kwargs: arguments passed to Probe constructor
:return: None
"""
Probe(timeout=timeout, fnc=functools.partial(self.is_port_open, port), **probe_kwargs).run() |
def data(self, data, part=False, dataset=''):
    """
    Parameters
    ----------
    data : `str`
        Text to parse.
    part : `bool`, optional
        True if data is partial (default: `False`).
    dataset : `str`, optional
        Dataset key prefix (default: '').
    """
    # NOTE(review): 'dataset' is accepted but not forwarded to the parent
    # parser -- presumably consumed by subclasses; confirm against callers.
    result = super().data(data, part)
    return result
constant[
Parameters
----------
data : `str`
Text to parse.
part : `bool`, optional
True if data is partial (default: `False`).
dataset : `str`, optional
Dataset key prefix (default: '').
]
return[call[call[name[super], parameter[]].data, parameter[name[data], name[part]]]] | keyword[def] identifier[data] ( identifier[self] , identifier[data] , identifier[part] = keyword[False] , identifier[dataset] = literal[string] ):
literal[string]
keyword[return] identifier[super] (). identifier[data] ( identifier[data] , identifier[part] ) | def data(self, data, part=False, dataset=''):
"""
Parameters
----------
data : `str`
Text to parse.
part : `bool`, optional
True if data is partial (default: `False`).
dataset : `str`, optional
Dataset key prefix (default: '').
"""
return super().data(data, part) |
def zoom_1_to_1(self):
"""Zoom the view to a 1 to 1 pixel ratio (100 %%).
"""
viewer = self.getfocus_viewer()
if hasattr(viewer, 'scale_to'):
viewer.scale_to(1.0, 1.0)
return True | def function[zoom_1_to_1, parameter[self]]:
constant[Zoom the view to a 1 to 1 pixel ratio (100 %%).
]
variable[viewer] assign[=] call[name[self].getfocus_viewer, parameter[]]
if call[name[hasattr], parameter[name[viewer], constant[scale_to]]] begin[:]
call[name[viewer].scale_to, parameter[constant[1.0], constant[1.0]]]
return[constant[True]] | keyword[def] identifier[zoom_1_to_1] ( identifier[self] ):
literal[string]
identifier[viewer] = identifier[self] . identifier[getfocus_viewer] ()
keyword[if] identifier[hasattr] ( identifier[viewer] , literal[string] ):
identifier[viewer] . identifier[scale_to] ( literal[int] , literal[int] )
keyword[return] keyword[True] | def zoom_1_to_1(self):
"""Zoom the view to a 1 to 1 pixel ratio (100 %%).
"""
viewer = self.getfocus_viewer()
if hasattr(viewer, 'scale_to'):
viewer.scale_to(1.0, 1.0) # depends on [control=['if'], data=[]]
return True |
def activate(self):
""" Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
"""
if self._handle != self._parser.env.get('current_program', False):
self._parser.env['current_program'] = self._handle
gl.glUseProgram(self._handle) | def function[activate, parameter[self]]:
constant[ Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
]
if compare[name[self]._handle not_equal[!=] call[name[self]._parser.env.get, parameter[constant[current_program], constant[False]]]] begin[:]
call[name[self]._parser.env][constant[current_program]] assign[=] name[self]._handle
call[name[gl].glUseProgram, parameter[name[self]._handle]] | keyword[def] identifier[activate] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_handle] != identifier[self] . identifier[_parser] . identifier[env] . identifier[get] ( literal[string] , keyword[False] ):
identifier[self] . identifier[_parser] . identifier[env] [ literal[string] ]= identifier[self] . identifier[_handle]
identifier[gl] . identifier[glUseProgram] ( identifier[self] . identifier[_handle] ) | def activate(self):
""" Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
"""
if self._handle != self._parser.env.get('current_program', False):
self._parser.env['current_program'] = self._handle
gl.glUseProgram(self._handle) # depends on [control=['if'], data=[]] |
def set_property(self, name, value):
"""set_property(property_name: str, value: object)
Set property *property_name* to *value*.
"""
if not hasattr(self.props, name):
raise TypeError("Unknown property: %r" % name)
setattr(self.props, name, value) | def function[set_property, parameter[self, name, value]]:
constant[set_property(property_name: str, value: object)
Set property *property_name* to *value*.
]
if <ast.UnaryOp object at 0x7da1b0fae5f0> begin[:]
<ast.Raise object at 0x7da1b0faf2e0>
call[name[setattr], parameter[name[self].props, name[name], name[value]]] | keyword[def] identifier[set_property] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] . identifier[props] , identifier[name] ):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[name] )
identifier[setattr] ( identifier[self] . identifier[props] , identifier[name] , identifier[value] ) | def set_property(self, name, value):
"""set_property(property_name: str, value: object)
Set property *property_name* to *value*.
"""
if not hasattr(self.props, name):
raise TypeError('Unknown property: %r' % name) # depends on [control=['if'], data=[]]
setattr(self.props, name, value) |
def optimize(self):
"""Analogous to `sklearn`'s fit. Returns `self` to enable chaining."""
def te(weights, r, proxies):
"""Helper func. `pyfinance.tracking_error` doesn't work here."""
if isinstance(weights, list):
weights = np.array(weights)
proxy = np.sum(proxies * weights, axis=1)
te = np.std(proxy - r) # not anlzd...
return te
ew = utils.equal_weights(n=self.n, sumto=self.sumto)
bnds = tuple((0, 1) for x in range(self.n))
cons = {"type": "eq", "fun": lambda x: np.sum(x) - self.sumto}
xs = []
funs = []
for i, j in zip(self._r, self._proxies):
opt = sco.minimize(
te,
x0=ew,
args=(i, j),
method="SLSQP",
bounds=bnds,
constraints=cons,
)
x, fun = opt["x"], opt["fun"]
xs.append(x)
funs.append(fun)
self._xs = np.array(xs)
self._funs = np.array(funs)
return self | def function[optimize, parameter[self]]:
constant[Analogous to `sklearn`'s fit. Returns `self` to enable chaining.]
def function[te, parameter[weights, r, proxies]]:
constant[Helper func. `pyfinance.tracking_error` doesn't work here.]
if call[name[isinstance], parameter[name[weights], name[list]]] begin[:]
variable[weights] assign[=] call[name[np].array, parameter[name[weights]]]
variable[proxy] assign[=] call[name[np].sum, parameter[binary_operation[name[proxies] * name[weights]]]]
variable[te] assign[=] call[name[np].std, parameter[binary_operation[name[proxy] - name[r]]]]
return[name[te]]
variable[ew] assign[=] call[name[utils].equal_weights, parameter[]]
variable[bnds] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18bcc8940>]]
variable[cons] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8c10>, <ast.Constant object at 0x7da18bcca4a0>], [<ast.Constant object at 0x7da18bcc92d0>, <ast.Lambda object at 0x7da18bcc8490>]]
variable[xs] assign[=] list[[]]
variable[funs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18bccb940>, <ast.Name object at 0x7da18bcc9fc0>]]] in starred[call[name[zip], parameter[name[self]._r, name[self]._proxies]]] begin[:]
variable[opt] assign[=] call[name[sco].minimize, parameter[name[te]]]
<ast.Tuple object at 0x7da2047e8a60> assign[=] tuple[[<ast.Subscript object at 0x7da2047e8880>, <ast.Subscript object at 0x7da2047e9c90>]]
call[name[xs].append, parameter[name[x]]]
call[name[funs].append, parameter[name[fun]]]
name[self]._xs assign[=] call[name[np].array, parameter[name[xs]]]
name[self]._funs assign[=] call[name[np].array, parameter[name[funs]]]
return[name[self]] | keyword[def] identifier[optimize] ( identifier[self] ):
literal[string]
keyword[def] identifier[te] ( identifier[weights] , identifier[r] , identifier[proxies] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[weights] , identifier[list] ):
identifier[weights] = identifier[np] . identifier[array] ( identifier[weights] )
identifier[proxy] = identifier[np] . identifier[sum] ( identifier[proxies] * identifier[weights] , identifier[axis] = literal[int] )
identifier[te] = identifier[np] . identifier[std] ( identifier[proxy] - identifier[r] )
keyword[return] identifier[te]
identifier[ew] = identifier[utils] . identifier[equal_weights] ( identifier[n] = identifier[self] . identifier[n] , identifier[sumto] = identifier[self] . identifier[sumto] )
identifier[bnds] = identifier[tuple] (( literal[int] , literal[int] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[self] . identifier[n] ))
identifier[cons] ={ literal[string] : literal[string] , literal[string] : keyword[lambda] identifier[x] : identifier[np] . identifier[sum] ( identifier[x] )- identifier[self] . identifier[sumto] }
identifier[xs] =[]
identifier[funs] =[]
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[zip] ( identifier[self] . identifier[_r] , identifier[self] . identifier[_proxies] ):
identifier[opt] = identifier[sco] . identifier[minimize] (
identifier[te] ,
identifier[x0] = identifier[ew] ,
identifier[args] =( identifier[i] , identifier[j] ),
identifier[method] = literal[string] ,
identifier[bounds] = identifier[bnds] ,
identifier[constraints] = identifier[cons] ,
)
identifier[x] , identifier[fun] = identifier[opt] [ literal[string] ], identifier[opt] [ literal[string] ]
identifier[xs] . identifier[append] ( identifier[x] )
identifier[funs] . identifier[append] ( identifier[fun] )
identifier[self] . identifier[_xs] = identifier[np] . identifier[array] ( identifier[xs] )
identifier[self] . identifier[_funs] = identifier[np] . identifier[array] ( identifier[funs] )
keyword[return] identifier[self] | def optimize(self):
"""Analogous to `sklearn`'s fit. Returns `self` to enable chaining."""
def te(weights, r, proxies):
"""Helper func. `pyfinance.tracking_error` doesn't work here."""
if isinstance(weights, list):
weights = np.array(weights) # depends on [control=['if'], data=[]]
proxy = np.sum(proxies * weights, axis=1)
te = np.std(proxy - r) # not anlzd...
return te
ew = utils.equal_weights(n=self.n, sumto=self.sumto)
bnds = tuple(((0, 1) for x in range(self.n)))
cons = {'type': 'eq', 'fun': lambda x: np.sum(x) - self.sumto}
xs = []
funs = []
for (i, j) in zip(self._r, self._proxies):
opt = sco.minimize(te, x0=ew, args=(i, j), method='SLSQP', bounds=bnds, constraints=cons)
(x, fun) = (opt['x'], opt['fun'])
xs.append(x)
funs.append(fun) # depends on [control=['for'], data=[]]
self._xs = np.array(xs)
self._funs = np.array(funs)
return self |
def _do_parse(inp, fmt, encoding, force_types):
"""Actually parse input.
Args:
inp: bytes yielding file-like object
fmt: format to use for parsing
encoding: encoding of `inp`
force_types:
if `True`, integers, floats, booleans and none/null
are recognized and returned as proper types instead of strings;
if `False`, everything is converted to strings
if `None`, backend return value is used
Returns:
parsed `inp` (dict or list) containing unicode values
Raises:
various sorts of errors raised by used libraries while parsing
"""
res = {}
_check_lib_installed(fmt, 'parse')
if fmt == 'ini':
cfg = configobj.ConfigObj(inp, encoding=encoding)
res = cfg.dict()
elif fmt == 'json':
if six.PY3:
# python 3 json only reads from unicode objects
inp = io.TextIOWrapper(inp, encoding=encoding)
res = json.load(inp, encoding=encoding)
elif fmt == 'json5':
if six.PY3:
inp = io.TextIOWrapper(inp, encoding=encoding)
res = json5.load(inp, encoding=encoding)
elif fmt == 'toml':
if not _is_utf8(encoding):
raise AnyMarkupError('toml is always utf-8 encoded according to specification')
if six.PY3:
# python 3 toml prefers unicode objects
inp = io.TextIOWrapper(inp, encoding=encoding)
res = toml.load(inp)
elif fmt == 'xml':
res = xmltodict.parse(inp, encoding=encoding)
elif fmt == 'yaml':
# guesses encoding by its own, there seems to be no way to pass
# it explicitly
res = yaml.safe_load(inp)
else:
raise # unknown format
# make sure it's all unicode and all int/float values were parsed correctly
# the unicode part is here because of yaml on PY2 and also as workaround for
# https://github.com/DiffSK/configobj/issues/18#issuecomment-76391689
return _ensure_proper_types(res, encoding, force_types) | def function[_do_parse, parameter[inp, fmt, encoding, force_types]]:
constant[Actually parse input.
Args:
inp: bytes yielding file-like object
fmt: format to use for parsing
encoding: encoding of `inp`
force_types:
if `True`, integers, floats, booleans and none/null
are recognized and returned as proper types instead of strings;
if `False`, everything is converted to strings
if `None`, backend return value is used
Returns:
parsed `inp` (dict or list) containing unicode values
Raises:
various sorts of errors raised by used libraries while parsing
]
variable[res] assign[=] dictionary[[], []]
call[name[_check_lib_installed], parameter[name[fmt], constant[parse]]]
if compare[name[fmt] equal[==] constant[ini]] begin[:]
variable[cfg] assign[=] call[name[configobj].ConfigObj, parameter[name[inp]]]
variable[res] assign[=] call[name[cfg].dict, parameter[]]
return[call[name[_ensure_proper_types], parameter[name[res], name[encoding], name[force_types]]]] | keyword[def] identifier[_do_parse] ( identifier[inp] , identifier[fmt] , identifier[encoding] , identifier[force_types] ):
literal[string]
identifier[res] ={}
identifier[_check_lib_installed] ( identifier[fmt] , literal[string] )
keyword[if] identifier[fmt] == literal[string] :
identifier[cfg] = identifier[configobj] . identifier[ConfigObj] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
identifier[res] = identifier[cfg] . identifier[dict] ()
keyword[elif] identifier[fmt] == literal[string] :
keyword[if] identifier[six] . identifier[PY3] :
identifier[inp] = identifier[io] . identifier[TextIOWrapper] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
identifier[res] = identifier[json] . identifier[load] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
keyword[elif] identifier[fmt] == literal[string] :
keyword[if] identifier[six] . identifier[PY3] :
identifier[inp] = identifier[io] . identifier[TextIOWrapper] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
identifier[res] = identifier[json5] . identifier[load] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
keyword[elif] identifier[fmt] == literal[string] :
keyword[if] keyword[not] identifier[_is_utf8] ( identifier[encoding] ):
keyword[raise] identifier[AnyMarkupError] ( literal[string] )
keyword[if] identifier[six] . identifier[PY3] :
identifier[inp] = identifier[io] . identifier[TextIOWrapper] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
identifier[res] = identifier[toml] . identifier[load] ( identifier[inp] )
keyword[elif] identifier[fmt] == literal[string] :
identifier[res] = identifier[xmltodict] . identifier[parse] ( identifier[inp] , identifier[encoding] = identifier[encoding] )
keyword[elif] identifier[fmt] == literal[string] :
identifier[res] = identifier[yaml] . identifier[safe_load] ( identifier[inp] )
keyword[else] :
keyword[raise]
keyword[return] identifier[_ensure_proper_types] ( identifier[res] , identifier[encoding] , identifier[force_types] ) | def _do_parse(inp, fmt, encoding, force_types):
"""Actually parse input.
Args:
inp: bytes yielding file-like object
fmt: format to use for parsing
encoding: encoding of `inp`
force_types:
if `True`, integers, floats, booleans and none/null
are recognized and returned as proper types instead of strings;
if `False`, everything is converted to strings
if `None`, backend return value is used
Returns:
parsed `inp` (dict or list) containing unicode values
Raises:
various sorts of errors raised by used libraries while parsing
"""
res = {}
_check_lib_installed(fmt, 'parse')
if fmt == 'ini':
cfg = configobj.ConfigObj(inp, encoding=encoding)
res = cfg.dict() # depends on [control=['if'], data=[]]
elif fmt == 'json':
if six.PY3:
# python 3 json only reads from unicode objects
inp = io.TextIOWrapper(inp, encoding=encoding) # depends on [control=['if'], data=[]]
res = json.load(inp, encoding=encoding) # depends on [control=['if'], data=[]]
elif fmt == 'json5':
if six.PY3:
inp = io.TextIOWrapper(inp, encoding=encoding) # depends on [control=['if'], data=[]]
res = json5.load(inp, encoding=encoding) # depends on [control=['if'], data=[]]
elif fmt == 'toml':
if not _is_utf8(encoding):
raise AnyMarkupError('toml is always utf-8 encoded according to specification') # depends on [control=['if'], data=[]]
if six.PY3:
# python 3 toml prefers unicode objects
inp = io.TextIOWrapper(inp, encoding=encoding) # depends on [control=['if'], data=[]]
res = toml.load(inp) # depends on [control=['if'], data=[]]
elif fmt == 'xml':
res = xmltodict.parse(inp, encoding=encoding) # depends on [control=['if'], data=[]]
elif fmt == 'yaml':
# guesses encoding by its own, there seems to be no way to pass
# it explicitly
res = yaml.safe_load(inp) # depends on [control=['if'], data=[]]
else:
raise # unknown format
# make sure it's all unicode and all int/float values were parsed correctly
# the unicode part is here because of yaml on PY2 and also as workaround for
# https://github.com/DiffSK/configobj/issues/18#issuecomment-76391689
return _ensure_proper_types(res, encoding, force_types) |
def remove_monitor(self, handle):
"""Remove a previously registered monitor.
See :meth:`AbstractDeviceAdapter.adjust_monitor`.
"""
action = (handle, "delete", None, None)
if self._currently_notifying:
self._deferred_adjustments.append(action)
else:
self._adjust_monitor_internal(*action) | def function[remove_monitor, parameter[self, handle]]:
constant[Remove a previously registered monitor.
See :meth:`AbstractDeviceAdapter.adjust_monitor`.
]
variable[action] assign[=] tuple[[<ast.Name object at 0x7da18fe92470>, <ast.Constant object at 0x7da18fe91a20>, <ast.Constant object at 0x7da18fe90070>, <ast.Constant object at 0x7da18fe913c0>]]
if name[self]._currently_notifying begin[:]
call[name[self]._deferred_adjustments.append, parameter[name[action]]] | keyword[def] identifier[remove_monitor] ( identifier[self] , identifier[handle] ):
literal[string]
identifier[action] =( identifier[handle] , literal[string] , keyword[None] , keyword[None] )
keyword[if] identifier[self] . identifier[_currently_notifying] :
identifier[self] . identifier[_deferred_adjustments] . identifier[append] ( identifier[action] )
keyword[else] :
identifier[self] . identifier[_adjust_monitor_internal] (* identifier[action] ) | def remove_monitor(self, handle):
"""Remove a previously registered monitor.
See :meth:`AbstractDeviceAdapter.adjust_monitor`.
"""
action = (handle, 'delete', None, None)
if self._currently_notifying:
self._deferred_adjustments.append(action) # depends on [control=['if'], data=[]]
else:
self._adjust_monitor_internal(*action) |
def dump_http(method, url, request_headers, response, output_stream):
"""
Dump all headers and response headers into output_stream.
:param request_headers: Dictionary of HTTP request headers.
:param response_headers: Dictionary of HTTP response headers.
:param output_stream: Stream where the request is being dumped at.
"""
# Start header.
output_stream.write('---------START-HTTP---------\n')
# Get parsed url.
parsed_url = urlsplit(url)
# Dump all request headers recursively.
http_path = parsed_url.path
if parsed_url.query:
http_path = http_path + '?' + parsed_url.query
output_stream.write('{0} {1} HTTP/1.1\n'.format(method,
http_path))
for k, v in list(request_headers.items()):
if k is 'authorization':
# Redact signature header value from trace logs.
v = re.sub(r'Signature=([[0-9a-f]+)', 'Signature=*REDACTED*', v)
output_stream.write('{0}: {1}\n'.format(k.title(), v))
# Write a new line.
output_stream.write('\n')
# Write response status code.
output_stream.write('HTTP/1.1 {0}\n'.format(response.status))
# Dump all response headers recursively.
for k, v in list(response.getheaders().items()):
output_stream.write('{0}: {1}\n'.format(k.title(), v))
# For all errors write all the available response body.
if response.status != 200 and \
response.status != 204 and response.status != 206:
output_stream.write('{0}'.format(response.read()))
# End header.
output_stream.write('---------END-HTTP---------\n') | def function[dump_http, parameter[method, url, request_headers, response, output_stream]]:
constant[
Dump all headers and response headers into output_stream.
:param request_headers: Dictionary of HTTP request headers.
:param response_headers: Dictionary of HTTP response headers.
:param output_stream: Stream where the request is being dumped at.
]
call[name[output_stream].write, parameter[constant[---------START-HTTP---------
]]]
variable[parsed_url] assign[=] call[name[urlsplit], parameter[name[url]]]
variable[http_path] assign[=] name[parsed_url].path
if name[parsed_url].query begin[:]
variable[http_path] assign[=] binary_operation[binary_operation[name[http_path] + constant[?]] + name[parsed_url].query]
call[name[output_stream].write, parameter[call[constant[{0} {1} HTTP/1.1
].format, parameter[name[method], name[http_path]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1db46d0>, <ast.Name object at 0x7da1b1db4cd0>]]] in starred[call[name[list], parameter[call[name[request_headers].items, parameter[]]]]] begin[:]
if compare[name[k] is constant[authorization]] begin[:]
variable[v] assign[=] call[name[re].sub, parameter[constant[Signature=([[0-9a-f]+)], constant[Signature=*REDACTED*], name[v]]]
call[name[output_stream].write, parameter[call[constant[{0}: {1}
].format, parameter[call[name[k].title, parameter[]], name[v]]]]]
call[name[output_stream].write, parameter[constant[
]]]
call[name[output_stream].write, parameter[call[constant[HTTP/1.1 {0}
].format, parameter[name[response].status]]]]
for taget[tuple[[<ast.Name object at 0x7da20c993c70>, <ast.Name object at 0x7da20c9933d0>]]] in starred[call[name[list], parameter[call[call[name[response].getheaders, parameter[]].items, parameter[]]]]] begin[:]
call[name[output_stream].write, parameter[call[constant[{0}: {1}
].format, parameter[call[name[k].title, parameter[]], name[v]]]]]
if <ast.BoolOp object at 0x7da1b22e88b0> begin[:]
call[name[output_stream].write, parameter[call[constant[{0}].format, parameter[call[name[response].read, parameter[]]]]]]
call[name[output_stream].write, parameter[constant[---------END-HTTP---------
]]] | keyword[def] identifier[dump_http] ( identifier[method] , identifier[url] , identifier[request_headers] , identifier[response] , identifier[output_stream] ):
literal[string]
identifier[output_stream] . identifier[write] ( literal[string] )
identifier[parsed_url] = identifier[urlsplit] ( identifier[url] )
identifier[http_path] = identifier[parsed_url] . identifier[path]
keyword[if] identifier[parsed_url] . identifier[query] :
identifier[http_path] = identifier[http_path] + literal[string] + identifier[parsed_url] . identifier[query]
identifier[output_stream] . identifier[write] ( literal[string] . identifier[format] ( identifier[method] ,
identifier[http_path] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[request_headers] . identifier[items] ()):
keyword[if] identifier[k] keyword[is] literal[string] :
identifier[v] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[v] )
identifier[output_stream] . identifier[write] ( literal[string] . identifier[format] ( identifier[k] . identifier[title] (), identifier[v] ))
identifier[output_stream] . identifier[write] ( literal[string] )
identifier[output_stream] . identifier[write] ( literal[string] . identifier[format] ( identifier[response] . identifier[status] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[response] . identifier[getheaders] (). identifier[items] ()):
identifier[output_stream] . identifier[write] ( literal[string] . identifier[format] ( identifier[k] . identifier[title] (), identifier[v] ))
keyword[if] identifier[response] . identifier[status] != literal[int] keyword[and] identifier[response] . identifier[status] != literal[int] keyword[and] identifier[response] . identifier[status] != literal[int] :
identifier[output_stream] . identifier[write] ( literal[string] . identifier[format] ( identifier[response] . identifier[read] ()))
identifier[output_stream] . identifier[write] ( literal[string] ) | def dump_http(method, url, request_headers, response, output_stream):
"""
Dump all headers and response headers into output_stream.
:param request_headers: Dictionary of HTTP request headers.
:param response_headers: Dictionary of HTTP response headers.
:param output_stream: Stream where the request is being dumped at.
"""
# Start header.
output_stream.write('---------START-HTTP---------\n')
# Get parsed url.
parsed_url = urlsplit(url)
# Dump all request headers recursively.
http_path = parsed_url.path
if parsed_url.query:
http_path = http_path + '?' + parsed_url.query # depends on [control=['if'], data=[]]
output_stream.write('{0} {1} HTTP/1.1\n'.format(method, http_path))
for (k, v) in list(request_headers.items()):
if k is 'authorization':
# Redact signature header value from trace logs.
v = re.sub('Signature=([[0-9a-f]+)', 'Signature=*REDACTED*', v) # depends on [control=['if'], data=[]]
output_stream.write('{0}: {1}\n'.format(k.title(), v)) # depends on [control=['for'], data=[]]
# Write a new line.
output_stream.write('\n')
# Write response status code.
output_stream.write('HTTP/1.1 {0}\n'.format(response.status))
# Dump all response headers recursively.
for (k, v) in list(response.getheaders().items()):
output_stream.write('{0}: {1}\n'.format(k.title(), v)) # depends on [control=['for'], data=[]]
# For all errors write all the available response body.
if response.status != 200 and response.status != 204 and (response.status != 206):
output_stream.write('{0}'.format(response.read())) # depends on [control=['if'], data=[]]
# End header.
output_stream.write('---------END-HTTP---------\n') |
def _configDevRequests(self, namestr, titlestr, devlist):
"""Generate configuration for I/O Request stats.
@param namestr: Field name component indicating device type.
@param titlestr: Title component indicating device type.
@param devlist: List of devices.
"""
name = 'diskio_%s_requests' % namestr
if self.graphEnabled(name):
graph = MuninGraph('Disk I/O - %s - Requests' % titlestr, self._category,
info='Disk I/O - %s Throughput, Read / write requests per second.'
% titlestr,
args='--base 1000 --lower-limit 0',
vlabel='reqs/sec read (-) / write (+)', printf='%6.1lf',
autoFixNames = True)
for dev in devlist:
graph.addField(dev + '_read',
fixLabel(dev, maxLabelLenGraphDual,
repl = '..', truncend=False,
delim = self._labelDelim.get(namestr)),
draw='LINE2', type='DERIVE', min=0, graph=False)
graph.addField(dev + '_write',
fixLabel(dev, maxLabelLenGraphDual,
repl = '..', truncend=False,
delim = self._labelDelim.get(namestr)),
draw='LINE2', type='DERIVE', min=0,
negative=(dev + '_read'),info=dev)
self.appendGraph(name, graph) | def function[_configDevRequests, parameter[self, namestr, titlestr, devlist]]:
constant[Generate configuration for I/O Request stats.
@param namestr: Field name component indicating device type.
@param titlestr: Title component indicating device type.
@param devlist: List of devices.
]
variable[name] assign[=] binary_operation[constant[diskio_%s_requests] <ast.Mod object at 0x7da2590d6920> name[namestr]]
if call[name[self].graphEnabled, parameter[name[name]]] begin[:]
variable[graph] assign[=] call[name[MuninGraph], parameter[binary_operation[constant[Disk I/O - %s - Requests] <ast.Mod object at 0x7da2590d6920> name[titlestr]], name[self]._category]]
for taget[name[dev]] in starred[name[devlist]] begin[:]
call[name[graph].addField, parameter[binary_operation[name[dev] + constant[_read]], call[name[fixLabel], parameter[name[dev], name[maxLabelLenGraphDual]]]]]
call[name[graph].addField, parameter[binary_operation[name[dev] + constant[_write]], call[name[fixLabel], parameter[name[dev], name[maxLabelLenGraphDual]]]]]
call[name[self].appendGraph, parameter[name[name], name[graph]]] | keyword[def] identifier[_configDevRequests] ( identifier[self] , identifier[namestr] , identifier[titlestr] , identifier[devlist] ):
literal[string]
identifier[name] = literal[string] % identifier[namestr]
keyword[if] identifier[self] . identifier[graphEnabled] ( identifier[name] ):
identifier[graph] = identifier[MuninGraph] ( literal[string] % identifier[titlestr] , identifier[self] . identifier[_category] ,
identifier[info] = literal[string]
% identifier[titlestr] ,
identifier[args] = literal[string] ,
identifier[vlabel] = literal[string] , identifier[printf] = literal[string] ,
identifier[autoFixNames] = keyword[True] )
keyword[for] identifier[dev] keyword[in] identifier[devlist] :
identifier[graph] . identifier[addField] ( identifier[dev] + literal[string] ,
identifier[fixLabel] ( identifier[dev] , identifier[maxLabelLenGraphDual] ,
identifier[repl] = literal[string] , identifier[truncend] = keyword[False] ,
identifier[delim] = identifier[self] . identifier[_labelDelim] . identifier[get] ( identifier[namestr] )),
identifier[draw] = literal[string] , identifier[type] = literal[string] , identifier[min] = literal[int] , identifier[graph] = keyword[False] )
identifier[graph] . identifier[addField] ( identifier[dev] + literal[string] ,
identifier[fixLabel] ( identifier[dev] , identifier[maxLabelLenGraphDual] ,
identifier[repl] = literal[string] , identifier[truncend] = keyword[False] ,
identifier[delim] = identifier[self] . identifier[_labelDelim] . identifier[get] ( identifier[namestr] )),
identifier[draw] = literal[string] , identifier[type] = literal[string] , identifier[min] = literal[int] ,
identifier[negative] =( identifier[dev] + literal[string] ), identifier[info] = identifier[dev] )
identifier[self] . identifier[appendGraph] ( identifier[name] , identifier[graph] ) | def _configDevRequests(self, namestr, titlestr, devlist):
"""Generate configuration for I/O Request stats.
@param namestr: Field name component indicating device type.
@param titlestr: Title component indicating device type.
@param devlist: List of devices.
"""
name = 'diskio_%s_requests' % namestr
if self.graphEnabled(name):
graph = MuninGraph('Disk I/O - %s - Requests' % titlestr, self._category, info='Disk I/O - %s Throughput, Read / write requests per second.' % titlestr, args='--base 1000 --lower-limit 0', vlabel='reqs/sec read (-) / write (+)', printf='%6.1lf', autoFixNames=True)
for dev in devlist:
graph.addField(dev + '_read', fixLabel(dev, maxLabelLenGraphDual, repl='..', truncend=False, delim=self._labelDelim.get(namestr)), draw='LINE2', type='DERIVE', min=0, graph=False)
graph.addField(dev + '_write', fixLabel(dev, maxLabelLenGraphDual, repl='..', truncend=False, delim=self._labelDelim.get(namestr)), draw='LINE2', type='DERIVE', min=0, negative=dev + '_read', info=dev) # depends on [control=['for'], data=['dev']]
self.appendGraph(name, graph) # depends on [control=['if'], data=[]] |
def join_path(self, *path):
""" Unite entries to generate a single path
:param path: path items to unite
:return: str
"""
path = self.directory_sep().join(path)
return self.normalize_path(path) | def function[join_path, parameter[self]]:
constant[ Unite entries to generate a single path
:param path: path items to unite
:return: str
]
variable[path] assign[=] call[call[name[self].directory_sep, parameter[]].join, parameter[name[path]]]
return[call[name[self].normalize_path, parameter[name[path]]]] | keyword[def] identifier[join_path] ( identifier[self] ,* identifier[path] ):
literal[string]
identifier[path] = identifier[self] . identifier[directory_sep] (). identifier[join] ( identifier[path] )
keyword[return] identifier[self] . identifier[normalize_path] ( identifier[path] ) | def join_path(self, *path):
""" Unite entries to generate a single path
:param path: path items to unite
:return: str
"""
path = self.directory_sep().join(path)
return self.normalize_path(path) |
def parse_image_name(self, image):
'''
simply split the uri from the image. Singularity handles
parsing of registry, namespace, image.
Parameters
==========
image: the complete image uri to load (e.g., docker://ubuntu)
'''
self._image = image
self.uri = self.get_uri(image)
self.image = self.remove_uri(image) | def function[parse_image_name, parameter[self, image]]:
constant[
simply split the uri from the image. Singularity handles
parsing of registry, namespace, image.
Parameters
==========
image: the complete image uri to load (e.g., docker://ubuntu)
]
name[self]._image assign[=] name[image]
name[self].uri assign[=] call[name[self].get_uri, parameter[name[image]]]
name[self].image assign[=] call[name[self].remove_uri, parameter[name[image]]] | keyword[def] identifier[parse_image_name] ( identifier[self] , identifier[image] ):
literal[string]
identifier[self] . identifier[_image] = identifier[image]
identifier[self] . identifier[uri] = identifier[self] . identifier[get_uri] ( identifier[image] )
identifier[self] . identifier[image] = identifier[self] . identifier[remove_uri] ( identifier[image] ) | def parse_image_name(self, image):
"""
simply split the uri from the image. Singularity handles
parsing of registry, namespace, image.
Parameters
==========
image: the complete image uri to load (e.g., docker://ubuntu)
"""
self._image = image
self.uri = self.get_uri(image)
self.image = self.remove_uri(image) |
def hil_optical_flow_encode(self, time_usec, sensor_id, integration_time_us, integrated_x, integrated_y, integrated_xgyro, integrated_ygyro, integrated_zgyro, temperature, quality, time_delta_distance_us, distance):
    """Build a HIL_OPTICAL_FLOW message from the given field values.

    Simulated optical flow from a flow sensor (e.g. PX4FLOW or an
    optical mouse sensor).

    time_usec              : Timestamp (microseconds, synced to UNIX time or since system boot) (uint64_t)
    sensor_id              : Sensor ID (uint8_t)
    integration_time_us    : Integration time in microseconds. Divide integrated_x and integrated_y by the integration time to obtain average flow. (uint32_t)
    integrated_x           : Flow in radians around X axis (Sensor RH rotation about the X axis induces a positive flow. Sensor linear motion along the positive Y axis induces a negative flow.) (float)
    integrated_y           : Flow in radians around Y axis (Sensor RH rotation about the Y axis induces a positive flow. Sensor linear motion along the positive X axis induces a positive flow.) (float)
    integrated_xgyro       : RH rotation around X axis (rad) (float)
    integrated_ygyro       : RH rotation around Y axis (rad) (float)
    integrated_zgyro       : RH rotation around Z axis (rad) (float)
    temperature            : Temperature * 100 in centi-degrees Celsius (int16_t)
    quality                : Optical flow quality / confidence. 0: no valid flow, 255: maximum quality (uint8_t)
    time_delta_distance_us : Time in microseconds since the distance was sampled. (uint32_t)
    distance               : Distance to the center of the flow field in meters. Positive value (including zero): distance known. Negative value: Unknown distance. (float)
    """
    # Collect the fields once, then forward them positionally to the
    # message constructor unchanged.
    fields = (time_usec, sensor_id, integration_time_us,
              integrated_x, integrated_y,
              integrated_xgyro, integrated_ygyro, integrated_zgyro,
              temperature, quality, time_delta_distance_us, distance)
    return MAVLink_hil_optical_flow_message(*fields)
constant[
Simulated optical flow from a flow sensor (e.g. PX4FLOW or optical
mouse sensor)
time_usec : Timestamp (microseconds, synced to UNIX time or since system boot) (uint64_t)
sensor_id : Sensor ID (uint8_t)
integration_time_us : Integration time in microseconds. Divide integrated_x and integrated_y by the integration time to obtain average flow. The integration time also indicates the. (uint32_t)
integrated_x : Flow in radians around X axis (Sensor RH rotation about the X axis induces a positive flow. Sensor linear motion along the positive Y axis induces a negative flow.) (float)
integrated_y : Flow in radians around Y axis (Sensor RH rotation about the Y axis induces a positive flow. Sensor linear motion along the positive X axis induces a positive flow.) (float)
integrated_xgyro : RH rotation around X axis (rad) (float)
integrated_ygyro : RH rotation around Y axis (rad) (float)
integrated_zgyro : RH rotation around Z axis (rad) (float)
temperature : Temperature * 100 in centi-degrees Celsius (int16_t)
quality : Optical flow quality / confidence. 0: no valid flow, 255: maximum quality (uint8_t)
time_delta_distance_us : Time in microseconds since the distance was sampled. (uint32_t)
distance : Distance to the center of the flow field in meters. Positive value (including zero): distance known. Negative value: Unknown distance. (float)
]
return[call[name[MAVLink_hil_optical_flow_message], parameter[name[time_usec], name[sensor_id], name[integration_time_us], name[integrated_x], name[integrated_y], name[integrated_xgyro], name[integrated_ygyro], name[integrated_zgyro], name[temperature], name[quality], name[time_delta_distance_us], name[distance]]]] | keyword[def] identifier[hil_optical_flow_encode] ( identifier[self] , identifier[time_usec] , identifier[sensor_id] , identifier[integration_time_us] , identifier[integrated_x] , identifier[integrated_y] , identifier[integrated_xgyro] , identifier[integrated_ygyro] , identifier[integrated_zgyro] , identifier[temperature] , identifier[quality] , identifier[time_delta_distance_us] , identifier[distance] ):
literal[string]
keyword[return] identifier[MAVLink_hil_optical_flow_message] ( identifier[time_usec] , identifier[sensor_id] , identifier[integration_time_us] , identifier[integrated_x] , identifier[integrated_y] , identifier[integrated_xgyro] , identifier[integrated_ygyro] , identifier[integrated_zgyro] , identifier[temperature] , identifier[quality] , identifier[time_delta_distance_us] , identifier[distance] ) | def hil_optical_flow_encode(self, time_usec, sensor_id, integration_time_us, integrated_x, integrated_y, integrated_xgyro, integrated_ygyro, integrated_zgyro, temperature, quality, time_delta_distance_us, distance):
"""
Simulated optical flow from a flow sensor (e.g. PX4FLOW or optical
mouse sensor)
time_usec : Timestamp (microseconds, synced to UNIX time or since system boot) (uint64_t)
sensor_id : Sensor ID (uint8_t)
integration_time_us : Integration time in microseconds. Divide integrated_x and integrated_y by the integration time to obtain average flow. The integration time also indicates the. (uint32_t)
integrated_x : Flow in radians around X axis (Sensor RH rotation about the X axis induces a positive flow. Sensor linear motion along the positive Y axis induces a negative flow.) (float)
integrated_y : Flow in radians around Y axis (Sensor RH rotation about the Y axis induces a positive flow. Sensor linear motion along the positive X axis induces a positive flow.) (float)
integrated_xgyro : RH rotation around X axis (rad) (float)
integrated_ygyro : RH rotation around Y axis (rad) (float)
integrated_zgyro : RH rotation around Z axis (rad) (float)
temperature : Temperature * 100 in centi-degrees Celsius (int16_t)
quality : Optical flow quality / confidence. 0: no valid flow, 255: maximum quality (uint8_t)
time_delta_distance_us : Time in microseconds since the distance was sampled. (uint32_t)
distance : Distance to the center of the flow field in meters. Positive value (including zero): distance known. Negative value: Unknown distance. (float)
"""
return MAVLink_hil_optical_flow_message(time_usec, sensor_id, integration_time_us, integrated_x, integrated_y, integrated_xgyro, integrated_ygyro, integrated_zgyro, temperature, quality, time_delta_distance_us, distance) |
def powerUp(self, powerup, interface=None, priority=0):
    """
    Installs a powerup (e.g. plugin) on an item or store.
    Powerups will be returned in an iterator when queried for using the
    'powerupsFor' method. Normally they will be returned in order of
    installation [this may change in future versions, so please don't
    depend on it]. Higher priorities are returned first. If you have
    something that should run before "normal" powerups, pass
    POWERUP_BEFORE; if you have something that should run after, pass
    POWERUP_AFTER. We suggest not depending too heavily on order of
    execution of your powerups, but if finer-grained control is necessary
    you may pass any integer. Normal (unspecified) priority is zero.
    Powerups will only be installed once on a given item. If you install a
    powerup for a given interface with priority 1, then again with priority
    30, the powerup will be adjusted to priority 30 but future calls to
    powerupFor will still only return that powerup once.
    If no interface or priority are specified, and the class of the
    powerup has a "powerupInterfaces" attribute (containing
    either a sequence of interfaces, or a sequence of
    (interface, priority) tuples), this object will be powered up
    with the powerup object on those interfaces.
    If no interface or priority are specified and the powerup has
    a "__getPowerupInterfaces__" method, it will be called with
    an iterable of (interface, priority) tuples, collected from the
    "powerupInterfaces" attribute described above. The iterable of
    (interface, priority) tuples it returns will then be
    installed.
    @param powerup: an Item that implements C{interface} (if specified)
    @param interface: a zope interface, or None
    @param priority: An int; preferably either POWERUP_BEFORE,
    POWERUP_AFTER, or unspecified.
    @raise TypeError: raises if interface is IPowerupIndirector You may not
    install a powerup for IPowerupIndirector because that would be
    nonsensical.
    """
    if interface is None:
        # No interface given: let the powerup declare its own
        # (interface, priority) pairs and install each of them.
        for iface, priority in powerup._getPowerupInterfaces():
            self.powerUp(powerup, iface, priority)
    elif interface is IPowerupIndirector:
        # BUG FIX: the original concatenated the Item instance onto a
        # str ("..." + powerup), which itself raised a confusing
        # TypeError and masked the intended message.  Format the
        # powerup's repr into the message instead.
        raise TypeError(
            "You cannot install a powerup for IPowerupIndirector: %r"
            % (powerup,))
    else:
        # findOrCreate makes installation idempotent per
        # (item, interface, powerup); re-installing only bumps priority.
        forc = self.store.findOrCreate(_PowerupConnector,
                                       item=self,
                                       interface=unicode(qual(interface)),
                                       powerup=powerup)
        forc.priority = priority
constant[
Installs a powerup (e.g. plugin) on an item or store.
Powerups will be returned in an iterator when queried for using the
'powerupsFor' method. Normally they will be returned in order of
installation [this may change in future versions, so please don't
depend on it]. Higher priorities are returned first. If you have
something that should run before "normal" powerups, pass
POWERUP_BEFORE; if you have something that should run after, pass
POWERUP_AFTER. We suggest not depending too heavily on order of
execution of your powerups, but if finer-grained control is necessary
you may pass any integer. Normal (unspecified) priority is zero.
Powerups will only be installed once on a given item. If you install a
powerup for a given interface with priority 1, then again with priority
30, the powerup will be adjusted to priority 30 but future calls to
powerupFor will still only return that powerup once.
If no interface or priority are specified, and the class of the
powerup has a "powerupInterfaces" attribute (containing
either a sequence of interfaces, or a sequence of
(interface, priority) tuples), this object will be powered up
with the powerup object on those interfaces.
If no interface or priority are specified and the powerup has
a "__getPowerupInterfaces__" method, it will be called with
an iterable of (interface, priority) tuples, collected from the
"powerupInterfaces" attribute described above. The iterable of
(interface, priority) tuples it returns will then be
installed.
@param powerup: an Item that implements C{interface} (if specified)
@param interface: a zope interface, or None
@param priority: An int; preferably either POWERUP_BEFORE,
POWERUP_AFTER, or unspecified.
@raise TypeError: raises if interface is IPowerupIndirector You may not
install a powerup for IPowerupIndirector because that would be
nonsensical.
]
if compare[name[interface] is constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0d69a80>, <ast.Name object at 0x7da1b0d69960>]]] in starred[call[name[powerup]._getPowerupInterfaces, parameter[]]] begin[:]
call[name[self].powerUp, parameter[name[powerup], name[iface], name[priority]]] | keyword[def] identifier[powerUp] ( identifier[self] , identifier[powerup] , identifier[interface] = keyword[None] , identifier[priority] = literal[int] ):
literal[string]
keyword[if] identifier[interface] keyword[is] keyword[None] :
keyword[for] identifier[iface] , identifier[priority] keyword[in] identifier[powerup] . identifier[_getPowerupInterfaces] ():
identifier[self] . identifier[powerUp] ( identifier[powerup] , identifier[iface] , identifier[priority] )
keyword[elif] identifier[interface] keyword[is] identifier[IPowerupIndirector] :
keyword[raise] identifier[TypeError] (
literal[string] +
identifier[powerup] )
keyword[else] :
identifier[forc] = identifier[self] . identifier[store] . identifier[findOrCreate] ( identifier[_PowerupConnector] ,
identifier[item] = identifier[self] ,
identifier[interface] = identifier[unicode] ( identifier[qual] ( identifier[interface] )),
identifier[powerup] = identifier[powerup] )
identifier[forc] . identifier[priority] = identifier[priority] | def powerUp(self, powerup, interface=None, priority=0):
"""
Installs a powerup (e.g. plugin) on an item or store.
Powerups will be returned in an iterator when queried for using the
'powerupsFor' method. Normally they will be returned in order of
installation [this may change in future versions, so please don't
depend on it]. Higher priorities are returned first. If you have
something that should run before "normal" powerups, pass
POWERUP_BEFORE; if you have something that should run after, pass
POWERUP_AFTER. We suggest not depending too heavily on order of
execution of your powerups, but if finer-grained control is necessary
you may pass any integer. Normal (unspecified) priority is zero.
Powerups will only be installed once on a given item. If you install a
powerup for a given interface with priority 1, then again with priority
30, the powerup will be adjusted to priority 30 but future calls to
powerupFor will still only return that powerup once.
If no interface or priority are specified, and the class of the
powerup has a "powerupInterfaces" attribute (containing
either a sequence of interfaces, or a sequence of
(interface, priority) tuples), this object will be powered up
with the powerup object on those interfaces.
If no interface or priority are specified and the powerup has
a "__getPowerupInterfaces__" method, it will be called with
an iterable of (interface, priority) tuples, collected from the
"powerupInterfaces" attribute described above. The iterable of
(interface, priority) tuples it returns will then be
installed.
@param powerup: an Item that implements C{interface} (if specified)
@param interface: a zope interface, or None
@param priority: An int; preferably either POWERUP_BEFORE,
POWERUP_AFTER, or unspecified.
@raise TypeError: raises if interface is IPowerupIndirector You may not
install a powerup for IPowerupIndirector because that would be
nonsensical.
"""
if interface is None:
for (iface, priority) in powerup._getPowerupInterfaces():
self.powerUp(powerup, iface, priority) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif interface is IPowerupIndirector:
raise TypeError('You cannot install a powerup for IPowerupIndirector: ' + powerup) # depends on [control=['if'], data=[]]
else:
forc = self.store.findOrCreate(_PowerupConnector, item=self, interface=unicode(qual(interface)), powerup=powerup)
forc.priority = priority |
def join_in(self, *objs):
    """Build a join condition linking table A and table C.

    With no ``objs`` given, returns an always-false clause (a column
    compared unequal to itself).  Otherwise the join links this
    relation's A-side column to the model's reversed column and
    restricts the B-side column to the key values extracted from
    ``objs``.
    """
    cols = self.table.c
    if not objs:
        # Always-false predicate: a column never differs from itself.
        return cols[self.fielda] != cols[self.fielda]
    keys = get_objs_columns(objs, self.reference_fieldname)
    link = cols[self.fielda] == self.model_class.c[self.reversed_fieldname]
    return link & cols[self.fieldb].in_(keys)
constant[
Create a join condition, connect A and C
]
if <ast.UnaryOp object at 0x7da2054a7730> begin[:]
return[compare[call[name[self].table.c][name[self].fielda] not_equal[!=] call[name[self].table.c][name[self].fielda]]] | keyword[def] identifier[join_in] ( identifier[self] ,* identifier[objs] ):
literal[string]
keyword[if] keyword[not] identifier[objs] :
keyword[return] identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fielda] ]!= identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fielda] ]
keyword[else] :
identifier[keys] = identifier[get_objs_columns] ( identifier[objs] , identifier[self] . identifier[reference_fieldname] )
keyword[return] ( identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fielda] ]== identifier[self] . identifier[model_class] . identifier[c] [ identifier[self] . identifier[reversed_fieldname] ])&( identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fieldb] ]. identifier[in_] ( identifier[keys] )) | def join_in(self, *objs):
"""
Create a join condition, connect A and C
"""
if not objs:
return self.table.c[self.fielda] != self.table.c[self.fielda] # depends on [control=['if'], data=[]]
else:
keys = get_objs_columns(objs, self.reference_fieldname)
return (self.table.c[self.fielda] == self.model_class.c[self.reversed_fieldname]) & self.table.c[self.fieldb].in_(keys) |
def _check_color_dim(val):
"""Ensure val is Nx(n_col), usually Nx3"""
val = np.atleast_2d(val)
if val.shape[1] not in (3, 4):
raise RuntimeError('Value must have second dimension of size 3 or 4')
return val, val.shape[1] | def function[_check_color_dim, parameter[val]]:
constant[Ensure val is Nx(n_col), usually Nx3]
variable[val] assign[=] call[name[np].atleast_2d, parameter[name[val]]]
if compare[call[name[val].shape][constant[1]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b0ea27d0>, <ast.Constant object at 0x7da1b0ea0490>]]] begin[:]
<ast.Raise object at 0x7da1b0ea1420>
return[tuple[[<ast.Name object at 0x7da1b0ea1bd0>, <ast.Subscript object at 0x7da1b0ea2110>]]] | keyword[def] identifier[_check_color_dim] ( identifier[val] ):
literal[string]
identifier[val] = identifier[np] . identifier[atleast_2d] ( identifier[val] )
keyword[if] identifier[val] . identifier[shape] [ literal[int] ] keyword[not] keyword[in] ( literal[int] , literal[int] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[return] identifier[val] , identifier[val] . identifier[shape] [ literal[int] ] | def _check_color_dim(val):
"""Ensure val is Nx(n_col), usually Nx3"""
val = np.atleast_2d(val)
if val.shape[1] not in (3, 4):
raise RuntimeError('Value must have second dimension of size 3 or 4') # depends on [control=['if'], data=[]]
return (val, val.shape[1]) |
def profile(self, num):
    """Look up a company profile by its registered number.

    Args:
        num (str): Company number to search on.

    Returns:
        The raw HTTP response from the company endpoint (after the
        shared HTTP-error check has run).
    """
    url = "{}company/{}".format(self._BASE_URI, num)
    response = self.session.get(url)
    self.handle_http_error(response)
    return response
constant[Search for company profile by company number.
Args:
num (str): Company number to search on.
]
variable[baseuri] assign[=] binary_operation[name[self]._BASE_URI + call[constant[company/{}].format, parameter[name[num]]]]
variable[res] assign[=] call[name[self].session.get, parameter[name[baseuri]]]
call[name[self].handle_http_error, parameter[name[res]]]
return[name[res]] | keyword[def] identifier[profile] ( identifier[self] , identifier[num] ):
literal[string]
identifier[baseuri] = identifier[self] . identifier[_BASE_URI] + literal[string] . identifier[format] ( identifier[num] )
identifier[res] = identifier[self] . identifier[session] . identifier[get] ( identifier[baseuri] )
identifier[self] . identifier[handle_http_error] ( identifier[res] )
keyword[return] identifier[res] | def profile(self, num):
"""Search for company profile by company number.
Args:
num (str): Company number to search on.
"""
baseuri = self._BASE_URI + 'company/{}'.format(num)
res = self.session.get(baseuri)
self.handle_http_error(res)
return res |
def _config_logs(lvl=None, name=None):
"""
Set up or change logging configuration.
_config_logs() => idempotent setup;
_config_logs(L) => change log level
"""
# print('_config_log', 'from %s' %name if name else '')
FORMAT = '%(message)s'
# maybe better for log files
# FORMAT='[%(levelname)s]:%(message)s',
# Reset handlers
for h in list(logging.root.handlers):
logging.root.removeHandler(h)
global _log_level
if lvl: _log_level = lvl
logging.basicConfig(level=_log_level, format=FORMAT, stream=sys.stdout)
_log = logging.getLogger(__name__)
_log.setLevel(_log_level)
# external
for log in ['urllib3', 'asyncio']:
logging.getLogger(log).setLevel(_log_level) | def function[_config_logs, parameter[lvl, name]]:
constant[
Set up or change logging configuration.
_config_logs() => idempotent setup;
_config_logs(L) => change log level
]
variable[FORMAT] assign[=] constant[%(message)s]
for taget[name[h]] in starred[call[name[list], parameter[name[logging].root.handlers]]] begin[:]
call[name[logging].root.removeHandler, parameter[name[h]]]
<ast.Global object at 0x7da2041db370>
if name[lvl] begin[:]
variable[_log_level] assign[=] name[lvl]
call[name[logging].basicConfig, parameter[]]
variable[_log] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
call[name[_log].setLevel, parameter[name[_log_level]]]
for taget[name[log]] in starred[list[[<ast.Constant object at 0x7da18dc9a950>, <ast.Constant object at 0x7da18dc982b0>]]] begin[:]
call[call[name[logging].getLogger, parameter[name[log]]].setLevel, parameter[name[_log_level]]] | keyword[def] identifier[_config_logs] ( identifier[lvl] = keyword[None] , identifier[name] = keyword[None] ):
literal[string]
identifier[FORMAT] = literal[string]
keyword[for] identifier[h] keyword[in] identifier[list] ( identifier[logging] . identifier[root] . identifier[handlers] ):
identifier[logging] . identifier[root] . identifier[removeHandler] ( identifier[h] )
keyword[global] identifier[_log_level]
keyword[if] identifier[lvl] : identifier[_log_level] = identifier[lvl]
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[_log_level] , identifier[format] = identifier[FORMAT] , identifier[stream] = identifier[sys] . identifier[stdout] )
identifier[_log] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[_log] . identifier[setLevel] ( identifier[_log_level] )
keyword[for] identifier[log] keyword[in] [ literal[string] , literal[string] ]:
identifier[logging] . identifier[getLogger] ( identifier[log] ). identifier[setLevel] ( identifier[_log_level] ) | def _config_logs(lvl=None, name=None):
"""
Set up or change logging configuration.
_config_logs() => idempotent setup;
_config_logs(L) => change log level
"""
# print('_config_log', 'from %s' %name if name else '')
FORMAT = '%(message)s'
# maybe better for log files
# FORMAT='[%(levelname)s]:%(message)s',
# Reset handlers
for h in list(logging.root.handlers):
logging.root.removeHandler(h) # depends on [control=['for'], data=['h']]
global _log_level
if lvl:
_log_level = lvl # depends on [control=['if'], data=[]]
logging.basicConfig(level=_log_level, format=FORMAT, stream=sys.stdout)
_log = logging.getLogger(__name__)
_log.setLevel(_log_level)
# external
for log in ['urllib3', 'asyncio']:
logging.getLogger(log).setLevel(_log_level) # depends on [control=['for'], data=['log']] |
def convert_cifar100(directory, output_directory,
                     output_filename='cifar100.hdf5'):
    """Converts the CIFAR-100 dataset to HDF5.
    Converts the CIFAR-100 dataset to an HDF5 dataset compatible with
    :class:`fuel.datasets.CIFAR100`. The converted dataset is saved as
    'cifar100.hdf5'.
    This method assumes the existence of the following file:
    `cifar-100-python.tar.gz`
    Parameters
    ----------
    directory : str
        Directory in which the required input files reside.
    output_directory : str
        Directory in which to save the converted dataset.
    output_filename : str, optional
        Name of the saved dataset. Defaults to 'cifar100.hdf5'.
    Returns
    -------
    output_paths : tuple of str
        Single-element tuple containing the path to the converted dataset.
    """
    output_path = os.path.join(output_directory, output_filename)
    h5file = h5py.File(output_path, mode="w")
    input_file = os.path.join(directory, 'cifar-100-python.tar.gz')
    # FIX: the original never closed the tarball; the context manager
    # releases it deterministically.  The duplicated train/test
    # unpickling is factored into _unpickle_cifar_member.
    with tarfile.open(input_file, 'r:gz') as tar_file:
        train = _unpickle_cifar_member(tar_file, 'cifar-100-python/train')
        test = _unpickle_cifar_member(tar_file, 'cifar-100-python/test')
    train_features = train['data'].reshape(train['data'].shape[0],
                                           3, 32, 32)
    train_coarse_labels = numpy.array(train['coarse_labels'],
                                      dtype=numpy.uint8)
    train_fine_labels = numpy.array(train['fine_labels'],
                                    dtype=numpy.uint8)
    test_features = test['data'].reshape(test['data'].shape[0],
                                         3, 32, 32)
    test_coarse_labels = numpy.array(test['coarse_labels'], dtype=numpy.uint8)
    test_fine_labels = numpy.array(test['fine_labels'], dtype=numpy.uint8)
    data = (('train', 'features', train_features),
            ('train', 'coarse_labels', train_coarse_labels.reshape((-1, 1))),
            ('train', 'fine_labels', train_fine_labels.reshape((-1, 1))),
            ('test', 'features', test_features),
            ('test', 'coarse_labels', test_coarse_labels.reshape((-1, 1))),
            ('test', 'fine_labels', test_fine_labels.reshape((-1, 1))))
    fill_hdf5_file(h5file, data)
    # Annotate axis semantics for downstream consumers of the HDF5 file.
    h5file['features'].dims[0].label = 'batch'
    h5file['features'].dims[1].label = 'channel'
    h5file['features'].dims[2].label = 'height'
    h5file['features'].dims[3].label = 'width'
    for labels_name in ('coarse_labels', 'fine_labels'):
        h5file[labels_name].dims[0].label = 'batch'
        h5file[labels_name].dims[1].label = 'index'
    h5file.flush()
    h5file.close()
    return (output_path,)


def _unpickle_cifar_member(tar_file, name):
    """Unpickle one member of the CIFAR-100 tarball and return the dict.

    The member file object is always closed, and on Python 3 the
    pickle is decoded with latin1 (it was written by Python 2).
    Avoids the original's shadowing of the ``file`` builtin.
    """
    member = tar_file.extractfile(name)
    try:
        if six.PY3:
            return cPickle.load(member, encoding='latin1')
        return cPickle.load(member)
    finally:
        member.close()
constant[Converts the CIFAR-100 dataset to HDF5.
Converts the CIFAR-100 dataset to an HDF5 dataset compatible with
:class:`fuel.datasets.CIFAR100`. The converted dataset is saved as
'cifar100.hdf5'.
This method assumes the existence of the following file:
`cifar-100-python.tar.gz`
Parameters
----------
directory : str
Directory in which the required input files reside.
output_directory : str
Directory in which to save the converted dataset.
output_filename : str, optional
Name of the saved dataset. Defaults to 'cifar100.hdf5'.
Returns
-------
output_paths : tuple of str
Single-element tuple containing the path to the converted dataset.
]
variable[output_path] assign[=] call[name[os].path.join, parameter[name[output_directory], name[output_filename]]]
variable[h5file] assign[=] call[name[h5py].File, parameter[name[output_path]]]
variable[input_file] assign[=] call[name[os].path.join, parameter[name[directory], constant[cifar-100-python.tar.gz]]]
variable[tar_file] assign[=] call[name[tarfile].open, parameter[name[input_file], constant[r:gz]]]
variable[file] assign[=] call[name[tar_file].extractfile, parameter[constant[cifar-100-python/train]]]
<ast.Try object at 0x7da18bcca710>
variable[train_features] assign[=] call[call[name[train]][constant[data]].reshape, parameter[call[call[name[train]][constant[data]].shape][constant[0]], constant[3], constant[32], constant[32]]]
variable[train_coarse_labels] assign[=] call[name[numpy].array, parameter[call[name[train]][constant[coarse_labels]]]]
variable[train_fine_labels] assign[=] call[name[numpy].array, parameter[call[name[train]][constant[fine_labels]]]]
variable[file] assign[=] call[name[tar_file].extractfile, parameter[constant[cifar-100-python/test]]]
<ast.Try object at 0x7da18bcc8820>
variable[test_features] assign[=] call[call[name[test]][constant[data]].reshape, parameter[call[call[name[test]][constant[data]].shape][constant[0]], constant[3], constant[32], constant[32]]]
variable[test_coarse_labels] assign[=] call[name[numpy].array, parameter[call[name[test]][constant[coarse_labels]]]]
variable[test_fine_labels] assign[=] call[name[numpy].array, parameter[call[name[test]][constant[fine_labels]]]]
variable[data] assign[=] tuple[[<ast.Tuple object at 0x7da18bcc8220>, <ast.Tuple object at 0x7da18bcc9f30>, <ast.Tuple object at 0x7da18bcc8bb0>, <ast.Tuple object at 0x7da18bcc9a50>, <ast.Tuple object at 0x7da18bccbf70>, <ast.Tuple object at 0x7da18bcca740>]]
call[name[fill_hdf5_file], parameter[name[h5file], name[data]]]
call[call[name[h5file]][constant[features]].dims][constant[0]].label assign[=] constant[batch]
call[call[name[h5file]][constant[features]].dims][constant[1]].label assign[=] constant[channel]
call[call[name[h5file]][constant[features]].dims][constant[2]].label assign[=] constant[height]
call[call[name[h5file]][constant[features]].dims][constant[3]].label assign[=] constant[width]
call[call[name[h5file]][constant[coarse_labels]].dims][constant[0]].label assign[=] constant[batch]
call[call[name[h5file]][constant[coarse_labels]].dims][constant[1]].label assign[=] constant[index]
call[call[name[h5file]][constant[fine_labels]].dims][constant[0]].label assign[=] constant[batch]
call[call[name[h5file]][constant[fine_labels]].dims][constant[1]].label assign[=] constant[index]
call[name[h5file].flush, parameter[]]
call[name[h5file].close, parameter[]]
return[tuple[[<ast.Name object at 0x7da18bcc9d50>]]] | keyword[def] identifier[convert_cifar100] ( identifier[directory] , identifier[output_directory] ,
identifier[output_filename] = literal[string] ):
literal[string]
identifier[output_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[output_directory] , identifier[output_filename] )
identifier[h5file] = identifier[h5py] . identifier[File] ( identifier[output_path] , identifier[mode] = literal[string] )
identifier[input_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , literal[string] )
identifier[tar_file] = identifier[tarfile] . identifier[open] ( identifier[input_file] , literal[string] )
identifier[file] = identifier[tar_file] . identifier[extractfile] ( literal[string] )
keyword[try] :
keyword[if] identifier[six] . identifier[PY3] :
identifier[train] = identifier[cPickle] . identifier[load] ( identifier[file] , identifier[encoding] = literal[string] )
keyword[else] :
identifier[train] = identifier[cPickle] . identifier[load] ( identifier[file] )
keyword[finally] :
identifier[file] . identifier[close] ()
identifier[train_features] = identifier[train] [ literal[string] ]. identifier[reshape] ( identifier[train] [ literal[string] ]. identifier[shape] [ literal[int] ],
literal[int] , literal[int] , literal[int] )
identifier[train_coarse_labels] = identifier[numpy] . identifier[array] ( identifier[train] [ literal[string] ],
identifier[dtype] = identifier[numpy] . identifier[uint8] )
identifier[train_fine_labels] = identifier[numpy] . identifier[array] ( identifier[train] [ literal[string] ],
identifier[dtype] = identifier[numpy] . identifier[uint8] )
identifier[file] = identifier[tar_file] . identifier[extractfile] ( literal[string] )
keyword[try] :
keyword[if] identifier[six] . identifier[PY3] :
identifier[test] = identifier[cPickle] . identifier[load] ( identifier[file] , identifier[encoding] = literal[string] )
keyword[else] :
identifier[test] = identifier[cPickle] . identifier[load] ( identifier[file] )
keyword[finally] :
identifier[file] . identifier[close] ()
identifier[test_features] = identifier[test] [ literal[string] ]. identifier[reshape] ( identifier[test] [ literal[string] ]. identifier[shape] [ literal[int] ],
literal[int] , literal[int] , literal[int] )
identifier[test_coarse_labels] = identifier[numpy] . identifier[array] ( identifier[test] [ literal[string] ], identifier[dtype] = identifier[numpy] . identifier[uint8] )
identifier[test_fine_labels] = identifier[numpy] . identifier[array] ( identifier[test] [ literal[string] ], identifier[dtype] = identifier[numpy] . identifier[uint8] )
identifier[data] =(( literal[string] , literal[string] , identifier[train_features] ),
( literal[string] , literal[string] , identifier[train_coarse_labels] . identifier[reshape] ((- literal[int] , literal[int] ))),
( literal[string] , literal[string] , identifier[train_fine_labels] . identifier[reshape] ((- literal[int] , literal[int] ))),
( literal[string] , literal[string] , identifier[test_features] ),
( literal[string] , literal[string] , identifier[test_coarse_labels] . identifier[reshape] ((- literal[int] , literal[int] ))),
( literal[string] , literal[string] , identifier[test_fine_labels] . identifier[reshape] ((- literal[int] , literal[int] ))))
identifier[fill_hdf5_file] ( identifier[h5file] , identifier[data] )
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] [ literal[string] ]. identifier[dims] [ literal[int] ]. identifier[label] = literal[string]
identifier[h5file] . identifier[flush] ()
identifier[h5file] . identifier[close] ()
keyword[return] ( identifier[output_path] ,) | def convert_cifar100(directory, output_directory, output_filename='cifar100.hdf5'):
"""Converts the CIFAR-100 dataset to HDF5.
Converts the CIFAR-100 dataset to an HDF5 dataset compatible with
:class:`fuel.datasets.CIFAR100`. The converted dataset is saved as
'cifar100.hdf5'.
This method assumes the existence of the following file:
`cifar-100-python.tar.gz`
Parameters
----------
directory : str
Directory in which the required input files reside.
output_directory : str
Directory in which to save the converted dataset.
output_filename : str, optional
Name of the saved dataset. Defaults to 'cifar100.hdf5'.
Returns
-------
output_paths : tuple of str
Single-element tuple containing the path to the converted dataset.
"""
output_path = os.path.join(output_directory, output_filename)
h5file = h5py.File(output_path, mode='w')
input_file = os.path.join(directory, 'cifar-100-python.tar.gz')
tar_file = tarfile.open(input_file, 'r:gz')
file = tar_file.extractfile('cifar-100-python/train')
try:
if six.PY3:
train = cPickle.load(file, encoding='latin1') # depends on [control=['if'], data=[]]
else:
train = cPickle.load(file) # depends on [control=['try'], data=[]]
finally:
file.close()
train_features = train['data'].reshape(train['data'].shape[0], 3, 32, 32)
train_coarse_labels = numpy.array(train['coarse_labels'], dtype=numpy.uint8)
train_fine_labels = numpy.array(train['fine_labels'], dtype=numpy.uint8)
file = tar_file.extractfile('cifar-100-python/test')
try:
if six.PY3:
test = cPickle.load(file, encoding='latin1') # depends on [control=['if'], data=[]]
else:
test = cPickle.load(file) # depends on [control=['try'], data=[]]
finally:
file.close()
test_features = test['data'].reshape(test['data'].shape[0], 3, 32, 32)
test_coarse_labels = numpy.array(test['coarse_labels'], dtype=numpy.uint8)
test_fine_labels = numpy.array(test['fine_labels'], dtype=numpy.uint8)
data = (('train', 'features', train_features), ('train', 'coarse_labels', train_coarse_labels.reshape((-1, 1))), ('train', 'fine_labels', train_fine_labels.reshape((-1, 1))), ('test', 'features', test_features), ('test', 'coarse_labels', test_coarse_labels.reshape((-1, 1))), ('test', 'fine_labels', test_fine_labels.reshape((-1, 1))))
fill_hdf5_file(h5file, data)
h5file['features'].dims[0].label = 'batch'
h5file['features'].dims[1].label = 'channel'
h5file['features'].dims[2].label = 'height'
h5file['features'].dims[3].label = 'width'
h5file['coarse_labels'].dims[0].label = 'batch'
h5file['coarse_labels'].dims[1].label = 'index'
h5file['fine_labels'].dims[0].label = 'batch'
h5file['fine_labels'].dims[1].label = 'index'
h5file.flush()
h5file.close()
return (output_path,) |
# NOTE(review): dataset dump — indentation stripped and the trailing ` | ...`
# residue belongs to the next dump column; code kept byte-identical.
def icon(self):
"""Get QIcon from wrapper"""
# Lazily build the QIcon from self.pm() on first access and cache it
# on the instance so repeated calls return the same object.
if self._icon is None:
self._icon = QIcon(self.pm())
return self._icon | def function[icon, parameter[self]]:
constant[Get QIcon from wrapper]
if compare[name[self]._icon is constant[None]] begin[:]
name[self]._icon assign[=] call[name[QIcon], parameter[call[name[self].pm, parameter[]]]]
return[name[self]._icon] | keyword[def] identifier[icon] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_icon] keyword[is] keyword[None] :
identifier[self] . identifier[_icon] = identifier[QIcon] ( identifier[self] . identifier[pm] ())
keyword[return] identifier[self] . identifier[_icon] | def icon(self):
"""Get QIcon from wrapper"""
if self._icon is None:
self._icon = QIcon(self.pm()) # depends on [control=['if'], data=[]]
return self._icon |
def parse(cls, fptr, offset, length):
"""Parse JPEG 2000 file type box.
Parameters
----------
f : file
Open file object.
offset : int
Start position of box in bytes.
length : int
Length of the box in bytes.
Returns
-------
FileTypeBox
Instance of the current file type box.
"""
# Read the remainder of the box (offset + length marks the box end;
# fptr is already positioned past the box header).
num_bytes = offset + length - fptr.tell()
read_buffer = fptr.read(num_bytes)
# Extract the brand, minor version.
(brand, minor_version) = struct.unpack_from('>4sI', read_buffer, 0)
# NOTE(review): threshold 0x030000 (== 196608) is far below any real
# sys.hexversion, so this branch is effectively always taken; the loop
# below uses 0x03000000 — this one looks like a missing-zeros typo.
if sys.hexversion >= 0x030000:
brand = brand.decode('utf-8')
# Extract the compatibility list. Each entry has 4 bytes.
num_entries = int((length - 16) / 4)
compatibility_list = []
for j in range(int(num_entries)):
# Entries start at byte 8 of the payload, one 4-byte tag each.
entry, = struct.unpack_from('>4s', read_buffer, 8 + j * 4)
if sys.hexversion >= 0x03000000:
try:
entry = entry.decode('utf-8')
except UnicodeDecodeError:
# The entry is invalid, but we've got code to catch this
# later on.
pass
compatibility_list.append(entry)
return cls(brand=brand, minor_version=minor_version,
compatibility_list=compatibility_list,
length=length, offset=offset) | def function[parse, parameter[cls, fptr, offset, length]]:
constant[Parse JPEG 2000 file type box.
Parameters
----------
f : file
Open file object.
offset : int
Start position of box in bytes.
length : int
Length of the box in bytes.
Returns
-------
FileTypeBox
Instance of the current file type box.
]
variable[num_bytes] assign[=] binary_operation[binary_operation[name[offset] + name[length]] - call[name[fptr].tell, parameter[]]]
variable[read_buffer] assign[=] call[name[fptr].read, parameter[name[num_bytes]]]
<ast.Tuple object at 0x7da2054a4850> assign[=] call[name[struct].unpack_from, parameter[constant[>4sI], name[read_buffer], constant[0]]]
if compare[name[sys].hexversion greater_or_equal[>=] constant[196608]] begin[:]
variable[brand] assign[=] call[name[brand].decode, parameter[constant[utf-8]]]
variable[num_entries] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[length] - constant[16]] / constant[4]]]]
variable[compatibility_list] assign[=] list[[]]
for taget[name[j]] in starred[call[name[range], parameter[call[name[int], parameter[name[num_entries]]]]]] begin[:]
<ast.Tuple object at 0x7da20c6e7a60> assign[=] call[name[struct].unpack_from, parameter[constant[>4s], name[read_buffer], binary_operation[constant[8] + binary_operation[name[j] * constant[4]]]]]
if compare[name[sys].hexversion greater_or_equal[>=] constant[50331648]] begin[:]
<ast.Try object at 0x7da20c6e5690>
call[name[compatibility_list].append, parameter[name[entry]]]
return[call[name[cls], parameter[]]] | keyword[def] identifier[parse] ( identifier[cls] , identifier[fptr] , identifier[offset] , identifier[length] ):
literal[string]
identifier[num_bytes] = identifier[offset] + identifier[length] - identifier[fptr] . identifier[tell] ()
identifier[read_buffer] = identifier[fptr] . identifier[read] ( identifier[num_bytes] )
( identifier[brand] , identifier[minor_version] )= identifier[struct] . identifier[unpack_from] ( literal[string] , identifier[read_buffer] , literal[int] )
keyword[if] identifier[sys] . identifier[hexversion] >= literal[int] :
identifier[brand] = identifier[brand] . identifier[decode] ( literal[string] )
identifier[num_entries] = identifier[int] (( identifier[length] - literal[int] )/ literal[int] )
identifier[compatibility_list] =[]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[int] ( identifier[num_entries] )):
identifier[entry] ,= identifier[struct] . identifier[unpack_from] ( literal[string] , identifier[read_buffer] , literal[int] + identifier[j] * literal[int] )
keyword[if] identifier[sys] . identifier[hexversion] >= literal[int] :
keyword[try] :
identifier[entry] = identifier[entry] . identifier[decode] ( literal[string] )
keyword[except] identifier[UnicodeDecodeError] :
keyword[pass]
identifier[compatibility_list] . identifier[append] ( identifier[entry] )
keyword[return] identifier[cls] ( identifier[brand] = identifier[brand] , identifier[minor_version] = identifier[minor_version] ,
identifier[compatibility_list] = identifier[compatibility_list] ,
identifier[length] = identifier[length] , identifier[offset] = identifier[offset] ) | def parse(cls, fptr, offset, length):
"""Parse JPEG 2000 file type box.
Parameters
----------
f : file
Open file object.
offset : int
Start position of box in bytes.
length : int
Length of the box in bytes.
Returns
-------
FileTypeBox
Instance of the current file type box.
"""
num_bytes = offset + length - fptr.tell()
read_buffer = fptr.read(num_bytes)
# Extract the brand, minor version.
(brand, minor_version) = struct.unpack_from('>4sI', read_buffer, 0)
if sys.hexversion >= 196608:
brand = brand.decode('utf-8') # depends on [control=['if'], data=[]]
# Extract the compatibility list. Each entry has 4 bytes.
num_entries = int((length - 16) / 4)
compatibility_list = []
for j in range(int(num_entries)):
(entry,) = struct.unpack_from('>4s', read_buffer, 8 + j * 4)
if sys.hexversion >= 50331648:
try:
entry = entry.decode('utf-8') # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
# The entry is invalid, but we've got code to catch this
# later on.
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
compatibility_list.append(entry) # depends on [control=['for'], data=['j']]
return cls(brand=brand, minor_version=minor_version, compatibility_list=compatibility_list, length=length, offset=offset) |
def to_wider_model(self, pre_layer_id, n_add):
"""Widen the last dimension of the output of the pre_layer.
Args:
pre_layer_id: The ID of a convolutional layer or dense layer.
n_add: The number of dimensions to add.
"""
# Record the morphism so the graph mutation can be replayed/inspected.
self.operation_history.append(("to_wider_model", pre_layer_id, n_add))
pre_layer = self.layer_list[pre_layer_id]
output_id = self.layer_id_to_output_node_ids[pre_layer_id][0]
dim = layer_width(pre_layer)
# Reset the visited map before propagating the width change downstream.
self.vis = {}
self._search(output_id, dim, dim, n_add)
# Update the tensor shapes.
for u in self.topological_order:
for v, layer_id in self.adj_list[u]:
self.node_list[v].shape = self.layer_list[layer_id].output_shape | def function[to_wider_model, parameter[self, pre_layer_id, n_add]]:
constant[Widen the last dimension of the output of the pre_layer.
Args:
pre_layer_id: The ID of a convolutional layer or dense layer.
n_add: The number of dimensions to add.
]
call[name[self].operation_history.append, parameter[tuple[[<ast.Constant object at 0x7da18ede4850>, <ast.Name object at 0x7da18ede5480>, <ast.Name object at 0x7da18ede5060>]]]]
variable[pre_layer] assign[=] call[name[self].layer_list][name[pre_layer_id]]
variable[output_id] assign[=] call[call[name[self].layer_id_to_output_node_ids][name[pre_layer_id]]][constant[0]]
variable[dim] assign[=] call[name[layer_width], parameter[name[pre_layer]]]
name[self].vis assign[=] dictionary[[], []]
call[name[self]._search, parameter[name[output_id], name[dim], name[dim], name[n_add]]]
for taget[name[u]] in starred[name[self].topological_order] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18ede5840>, <ast.Name object at 0x7da18ede6110>]]] in starred[call[name[self].adj_list][name[u]]] begin[:]
call[name[self].node_list][name[v]].shape assign[=] call[name[self].layer_list][name[layer_id]].output_shape | keyword[def] identifier[to_wider_model] ( identifier[self] , identifier[pre_layer_id] , identifier[n_add] ):
literal[string]
identifier[self] . identifier[operation_history] . identifier[append] (( literal[string] , identifier[pre_layer_id] , identifier[n_add] ))
identifier[pre_layer] = identifier[self] . identifier[layer_list] [ identifier[pre_layer_id] ]
identifier[output_id] = identifier[self] . identifier[layer_id_to_output_node_ids] [ identifier[pre_layer_id] ][ literal[int] ]
identifier[dim] = identifier[layer_width] ( identifier[pre_layer] )
identifier[self] . identifier[vis] ={}
identifier[self] . identifier[_search] ( identifier[output_id] , identifier[dim] , identifier[dim] , identifier[n_add] )
keyword[for] identifier[u] keyword[in] identifier[self] . identifier[topological_order] :
keyword[for] identifier[v] , identifier[layer_id] keyword[in] identifier[self] . identifier[adj_list] [ identifier[u] ]:
identifier[self] . identifier[node_list] [ identifier[v] ]. identifier[shape] = identifier[self] . identifier[layer_list] [ identifier[layer_id] ]. identifier[output_shape] | def to_wider_model(self, pre_layer_id, n_add):
"""Widen the last dimension of the output of the pre_layer.
Args:
pre_layer_id: The ID of a convolutional layer or dense layer.
n_add: The number of dimensions to add.
"""
self.operation_history.append(('to_wider_model', pre_layer_id, n_add))
pre_layer = self.layer_list[pre_layer_id]
output_id = self.layer_id_to_output_node_ids[pre_layer_id][0]
dim = layer_width(pre_layer)
self.vis = {}
self._search(output_id, dim, dim, n_add)
# Update the tensor shapes.
for u in self.topological_order:
for (v, layer_id) in self.adj_list[u]:
self.node_list[v].shape = self.layer_list[layer_id].output_shape # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['u']] |
def parameter_count(funcsig):
"""Get the number of positional-or-keyword or position-only parameters in a
function signature.
Parameters
----------
funcsig : inspect.Signature
A UDF signature
Returns
-------
int
The number of parameters
"""
# Only parameters WITHOUT a default are counted (filtered by
# `param.default is Parameter.empty`), i.e. the required positional ones.
return sum(
param.kind in {param.POSITIONAL_OR_KEYWORD, param.POSITIONAL_ONLY}
for param in funcsig.parameters.values()
if param.default is Parameter.empty
) | def function[parameter_count, parameter[funcsig]]:
constant[Get the number of positional-or-keyword or position-only parameters in a
function signature.
Parameters
----------
funcsig : inspect.Signature
A UDF signature
Returns
-------
int
The number of parameters
]
return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da20e957280>]]] | keyword[def] identifier[parameter_count] ( identifier[funcsig] ):
literal[string]
keyword[return] identifier[sum] (
identifier[param] . identifier[kind] keyword[in] { identifier[param] . identifier[POSITIONAL_OR_KEYWORD] , identifier[param] . identifier[POSITIONAL_ONLY] }
keyword[for] identifier[param] keyword[in] identifier[funcsig] . identifier[parameters] . identifier[values] ()
keyword[if] identifier[param] . identifier[default] keyword[is] identifier[Parameter] . identifier[empty]
) | def parameter_count(funcsig):
"""Get the number of positional-or-keyword or position-only parameters in a
function signature.
Parameters
----------
funcsig : inspect.Signature
A UDF signature
Returns
-------
int
The number of parameters
"""
return sum((param.kind in {param.POSITIONAL_OR_KEYWORD, param.POSITIONAL_ONLY} for param in funcsig.parameters.values() if param.default is Parameter.empty)) |
def to_python(self):
"""Returns a plain python dict and converts to plain python objects all
this object's descendants.
"""
# Shallow-copy first, then recurse only into DottedCollection values so
# nested collections are also converted; other values pass through as-is.
result = dict(self)
for key, value in iteritems(result):
if isinstance(value, DottedCollection):
result[key] = value.to_python()
return result | def function[to_python, parameter[self]]:
constant[Returns a plain python dict and converts to plain python objects all
this object's descendants.
]
variable[result] assign[=] call[name[dict], parameter[name[self]]]
for taget[tuple[[<ast.Name object at 0x7da204567430>, <ast.Name object at 0x7da204567e80>]]] in starred[call[name[iteritems], parameter[name[result]]]] begin[:]
if call[name[isinstance], parameter[name[value], name[DottedCollection]]] begin[:]
call[name[result]][name[key]] assign[=] call[name[value].to_python, parameter[]]
return[name[result]] | keyword[def] identifier[to_python] ( identifier[self] ):
literal[string]
identifier[result] = identifier[dict] ( identifier[self] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[result] ):
keyword[if] identifier[isinstance] ( identifier[value] , identifier[DottedCollection] ):
identifier[result] [ identifier[key] ]= identifier[value] . identifier[to_python] ()
keyword[return] identifier[result] | def to_python(self):
"""Returns a plain python dict and converts to plain python objects all
this object's descendants.
"""
result = dict(self)
for (key, value) in iteritems(result):
if isinstance(value, DottedCollection):
result[key] = value.to_python() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return result |
def prefix(self, imod: YangIdentifier, mid: ModuleId) -> YangIdentifier:
"""Return the prefix corresponding to an implemented module.
Args:
imod: Name of an implemented module.
mid: Identifier of the context module.
Raises:
ModuleNotImplemented: If `imod` is not implemented.
ModuleNotRegistered: If `mid` is not registered in YANG library.
ModuleNotImported: If `imod` is not imported in `mid`.
"""
# `from None` suppresses the KeyError chain so callers see only the
# domain-specific exception.
try:
did = (imod, self.implement[imod])
except KeyError:
raise ModuleNotImplemented(imod) from None
try:
pmap = self.modules[mid].prefix_map
except KeyError:
raise ModuleNotRegistered(*mid) from None
# Reverse lookup: find the prefix whose mapping matches (imod, revision).
for p in pmap:
if pmap[p] == did:
return p
raise ModuleNotImported(imod, mid) | def function[prefix, parameter[self, imod, mid]]:
constant[Return the prefix corresponding to an implemented module.
Args:
imod: Name of an implemented module.
mid: Identifier of the context module.
Raises:
ModuleNotImplemented: If `imod` is not implemented.
ModuleNotRegistered: If `mid` is not registered in YANG library.
ModuleNotImported: If `imod` is not imported in `mid`.
]
<ast.Try object at 0x7da1b04d53f0>
<ast.Try object at 0x7da1b04d6260>
for taget[name[p]] in starred[name[pmap]] begin[:]
if compare[call[name[pmap]][name[p]] equal[==] name[did]] begin[:]
return[name[p]]
<ast.Raise object at 0x7da1b052aaa0> | keyword[def] identifier[prefix] ( identifier[self] , identifier[imod] : identifier[YangIdentifier] , identifier[mid] : identifier[ModuleId] )-> identifier[YangIdentifier] :
literal[string]
keyword[try] :
identifier[did] =( identifier[imod] , identifier[self] . identifier[implement] [ identifier[imod] ])
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ModuleNotImplemented] ( identifier[imod] ) keyword[from] keyword[None]
keyword[try] :
identifier[pmap] = identifier[self] . identifier[modules] [ identifier[mid] ]. identifier[prefix_map]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ModuleNotRegistered] (* identifier[mid] ) keyword[from] keyword[None]
keyword[for] identifier[p] keyword[in] identifier[pmap] :
keyword[if] identifier[pmap] [ identifier[p] ]== identifier[did] :
keyword[return] identifier[p]
keyword[raise] identifier[ModuleNotImported] ( identifier[imod] , identifier[mid] ) | def prefix(self, imod: YangIdentifier, mid: ModuleId) -> YangIdentifier:
"""Return the prefix corresponding to an implemented module.
Args:
imod: Name of an implemented module.
mid: Identifier of the context module.
Raises:
ModuleNotImplemented: If `imod` is not implemented.
ModuleNotRegistered: If `mid` is not registered in YANG library.
ModuleNotImported: If `imod` is not imported in `mid`.
"""
try:
did = (imod, self.implement[imod]) # depends on [control=['try'], data=[]]
except KeyError:
raise ModuleNotImplemented(imod) from None # depends on [control=['except'], data=[]]
try:
pmap = self.modules[mid].prefix_map # depends on [control=['try'], data=[]]
except KeyError:
raise ModuleNotRegistered(*mid) from None # depends on [control=['except'], data=[]]
for p in pmap:
if pmap[p] == did:
return p # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
raise ModuleNotImported(imod, mid) |
# Property setter: stores the new state and notifies listeners.
def state(self, value):
"""set player state, emit state changed signal
outer object should not set state directly,
use ``pause`` / ``resume`` / ``stop`` / ``play`` method instead.
"""
self._state = value
# Signal is emitted unconditionally, even if value equals the old state.
self.state_changed.emit(value) | def function[state, parameter[self, value]]:
constant[set player state, emit state changed signal
outer object should not set state directly,
use ``pause`` / ``resume`` / ``stop`` / ``play`` method instead.
]
name[self]._state assign[=] name[value]
call[name[self].state_changed.emit, parameter[name[value]]] | keyword[def] identifier[state] ( identifier[self] , identifier[value] ):
literal[string]
identifier[self] . identifier[_state] = identifier[value]
identifier[self] . identifier[state_changed] . identifier[emit] ( identifier[value] ) | def state(self, value):
"""set player state, emit state changed signal
outer object should not set state directly,
use ``pause`` / ``resume`` / ``stop`` / ``play`` method instead.
"""
self._state = value
self.state_changed.emit(value) |
def get_values(self, attr_name):
"""
Retrieves the unique set of values seen for the given attribute
at this node.
"""
# Union of value keys from the continuous distributions, the discrete
# counts, and the branch names; set() removes duplicates across sources.
ret = list(self._attr_value_cdist[attr_name].keys()) \
+ list(self._attr_value_counts[attr_name].keys()) \
+ list(self._branches.keys())
ret = set(ret)
return ret | def function[get_values, parameter[self, attr_name]]:
constant[
Retrieves the unique set of values seen for the given attribute
at this node.
]
variable[ret] assign[=] binary_operation[binary_operation[call[name[list], parameter[call[call[name[self]._attr_value_cdist][name[attr_name]].keys, parameter[]]]] + call[name[list], parameter[call[call[name[self]._attr_value_counts][name[attr_name]].keys, parameter[]]]]] + call[name[list], parameter[call[name[self]._branches.keys, parameter[]]]]]
variable[ret] assign[=] call[name[set], parameter[name[ret]]]
return[name[ret]] | keyword[def] identifier[get_values] ( identifier[self] , identifier[attr_name] ):
literal[string]
identifier[ret] = identifier[list] ( identifier[self] . identifier[_attr_value_cdist] [ identifier[attr_name] ]. identifier[keys] ())+ identifier[list] ( identifier[self] . identifier[_attr_value_counts] [ identifier[attr_name] ]. identifier[keys] ())+ identifier[list] ( identifier[self] . identifier[_branches] . identifier[keys] ())
identifier[ret] = identifier[set] ( identifier[ret] )
keyword[return] identifier[ret] | def get_values(self, attr_name):
"""
Retrieves the unique set of values seen for the given attribute
at this node.
"""
ret = list(self._attr_value_cdist[attr_name].keys()) + list(self._attr_value_counts[attr_name].keys()) + list(self._branches.keys())
ret = set(ret)
return ret |
def msgblock(key, text, side='|'):
""" puts text inside a visual ascii block """
# Top rule, one `side`-prefixed line per input line, then a bottom rule
# (the literal 'L' is the bottom-left corner glyph of the frame).
blocked_text = ''.join(
[' + --- ', key, ' ---\n'] +
[' ' + side + ' ' + line + '\n' for line in text.split('\n')] +
[' L ___ ', key, ' ___\n']
)
return blocked_text | def function[msgblock, parameter[key, text, side]]:
constant[ puts text inside a visual ascii block ]
variable[blocked_text] assign[=] call[constant[].join, parameter[binary_operation[binary_operation[list[[<ast.Constant object at 0x7da1b24b43d0>, <ast.Name object at 0x7da1b24b5870>, <ast.Constant object at 0x7da1b24b7130>]] + <ast.ListComp object at 0x7da1b24b5c90>] + list[[<ast.Constant object at 0x7da1b24b7a30>, <ast.Name object at 0x7da1b24b69b0>, <ast.Constant object at 0x7da1b24b42e0>]]]]]
return[name[blocked_text]] | keyword[def] identifier[msgblock] ( identifier[key] , identifier[text] , identifier[side] = literal[string] ):
literal[string]
identifier[blocked_text] = literal[string] . identifier[join] (
[ literal[string] , identifier[key] , literal[string] ]+
[ literal[string] + identifier[side] + literal[string] + identifier[line] + literal[string] keyword[for] identifier[line] keyword[in] identifier[text] . identifier[split] ( literal[string] )]+
[ literal[string] , identifier[key] , literal[string] ]
)
keyword[return] identifier[blocked_text] | def msgblock(key, text, side='|'):
""" puts text inside a visual ascii block """
blocked_text = ''.join([' + --- ', key, ' ---\n'] + [' ' + side + ' ' + line + '\n' for line in text.split('\n')] + [' L ___ ', key, ' ___\n'])
return blocked_text |
def get_url_feed(self, package=None, timeout=None):
""" Get a live file feed with the latest files submitted to VirusTotal.
Allows you to retrieve a live feed of reports on absolutely all URLs scanned by VirusTotal. This API requires
you to stay relatively synced with the live submissions as only a backlog of 24 hours is provided at any given
point in time.
This API returns a bzip2 compressed tarball. For per-minute packages the compressed package contains a unique
file, the file contains a json per line, this json is a full report on a given URL processed by VirusTotal
during the given time window. The URL report follows the exact same format as the response of the URL report
API if the allinfo=1 parameter is provided. For hourly packages, the tarball contains 60 files, one per each
minute of the window.
:param package: Indicates a time window to pull reports on all items received during such window.
Only per-minute and hourly windows are allowed, the format is %Y%m%dT%H%M (e.g. 20160304T0900)
or %Y%m%dT%H (e.g. 20160304T09). Time is expressed in UTC.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: BZIP2 response: please see https://www.virustotal.com/en/documentation/private-api/#file-feed
"""
# Default window: round current UTC time down to a 5-minute boundary and
# step back one extra 5-minute slot, so the requested package is complete.
# NOTE(review): utcnow() is naive; fine here since only strftime is used.
if package is None:
now = datetime.utcnow()
five_minutes_ago = now - timedelta(
minutes=now.minute % 5 + 5, seconds=now.second, microseconds=now.microsecond)
package = five_minutes_ago.strftime('%Y%m%dT%H%M')
params = {'apikey': self.api_key, 'package': package}
try:
response = requests.get(self.base + 'url/feed', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
# Network/timeout failures are reported as a dict, not raised.
return dict(error=str(e))
# json_results=False: the payload is a bzip2 tarball, not JSON.
return _return_response_and_status_code(response, json_results=False) | def function[get_url_feed, parameter[self, package, timeout]]:
constant[ Get a live file feed with the latest files submitted to VirusTotal.
Allows you to retrieve a live feed of reports on absolutely all URLs scanned by VirusTotal. This API requires
you to stay relatively synced with the live submissions as only a backlog of 24 hours is provided at any given
point in time.
This API returns a bzip2 compressed tarball. For per-minute packages the compressed package contains a unique
file, the file contains a json per line, this json is a full report on a given URL processed by VirusTotal
during the given time window. The URL report follows the exact same format as the response of the URL report
API if the allinfo=1 parameter is provided. For hourly packages, the tarball contains 60 files, one per each
minute of the window.
:param package: Indicates a time window to pull reports on all items received during such window.
Only per-minute and hourly windows are allowed, the format is %Y%m%dT%H%M (e.g. 20160304T0900)
or %Y%m%dT%H (e.g. 20160304T09). Time is expressed in UTC.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: BZIP2 response: please see https://www.virustotal.com/en/documentation/private-api/#file-feed
]
if compare[name[package] is constant[None]] begin[:]
variable[now] assign[=] call[name[datetime].utcnow, parameter[]]
variable[five_minutes_ago] assign[=] binary_operation[name[now] - call[name[timedelta], parameter[]]]
variable[package] assign[=] call[name[five_minutes_ago].strftime, parameter[constant[%Y%m%dT%H%M]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f38c40>, <ast.Constant object at 0x7da1b0f3ad70>], [<ast.Attribute object at 0x7da1b0f39ae0>, <ast.Name object at 0x7da1b0f39a20>]]
<ast.Try object at 0x7da1b0f3b640>
return[call[name[_return_response_and_status_code], parameter[name[response]]]] | keyword[def] identifier[get_url_feed] ( identifier[self] , identifier[package] = keyword[None] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[package] keyword[is] keyword[None] :
identifier[now] = identifier[datetime] . identifier[utcnow] ()
identifier[five_minutes_ago] = identifier[now] - identifier[timedelta] (
identifier[minutes] = identifier[now] . identifier[minute] % literal[int] + literal[int] , identifier[seconds] = identifier[now] . identifier[second] , identifier[microseconds] = identifier[now] . identifier[microsecond] )
identifier[package] = identifier[five_minutes_ago] . identifier[strftime] ( literal[string] )
identifier[params] ={ literal[string] : identifier[self] . identifier[api_key] , literal[string] : identifier[package] }
keyword[try] :
identifier[response] = identifier[requests] . identifier[get] ( identifier[self] . identifier[base] + literal[string] , identifier[params] = identifier[params] , identifier[proxies] = identifier[self] . identifier[proxies] , identifier[timeout] = identifier[timeout] )
keyword[except] identifier[requests] . identifier[RequestException] keyword[as] identifier[e] :
keyword[return] identifier[dict] ( identifier[error] = identifier[str] ( identifier[e] ))
keyword[return] identifier[_return_response_and_status_code] ( identifier[response] , identifier[json_results] = keyword[False] ) | def get_url_feed(self, package=None, timeout=None):
""" Get a live file feed with the latest files submitted to VirusTotal.
Allows you to retrieve a live feed of reports on absolutely all URLs scanned by VirusTotal. This API requires
you to stay relatively synced with the live submissions as only a backlog of 24 hours is provided at any given
point in time.
This API returns a bzip2 compressed tarball. For per-minute packages the compressed package contains a unique
file, the file contains a json per line, this json is a full report on a given URL processed by VirusTotal
during the given time window. The URL report follows the exact same format as the response of the URL report
API if the allinfo=1 parameter is provided. For hourly packages, the tarball contains 60 files, one per each
minute of the window.
:param package: Indicates a time window to pull reports on all items received during such window.
Only per-minute and hourly windows are allowed, the format is %Y%m%dT%H%M (e.g. 20160304T0900)
or %Y%m%dT%H (e.g. 20160304T09). Time is expressed in UTC.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: BZIP2 response: please see https://www.virustotal.com/en/documentation/private-api/#file-feed
"""
if package is None:
now = datetime.utcnow()
five_minutes_ago = now - timedelta(minutes=now.minute % 5 + 5, seconds=now.second, microseconds=now.microsecond)
package = five_minutes_ago.strftime('%Y%m%dT%H%M') # depends on [control=['if'], data=['package']]
params = {'apikey': self.api_key, 'package': package}
try:
response = requests.get(self.base + 'url/feed', params=params, proxies=self.proxies, timeout=timeout) # depends on [control=['try'], data=[]]
except requests.RequestException as e:
return dict(error=str(e)) # depends on [control=['except'], data=['e']]
return _return_response_and_status_code(response, json_results=False) |
def _format_metadata_attribute(value):
"""Format a value for writing to HDF5 as a `h5py.Dataset` attribute
"""
# None and regular (evenly-spaced) Index objects are not stored at all;
# IgnoredAttribute is raised as a sentinel for the caller to skip them.
if (value is None or
(isinstance(value, Index) and value.regular)):
raise IgnoredAttribute
# map type to something HDF5 can handle
# First matching entry wins — presumably ATTR_TYPE_MAP is ordered so more
# specific types come first (verify at its definition).
for typekey, func in ATTR_TYPE_MAP.items():
if issubclass(type(value), typekey):
return func(value)
# No converter matched: assume HDF5 can store the value directly.
return value | def function[_format_metadata_attribute, parameter[value]]:
constant[Format a value for writing to HDF5 as a `h5py.Dataset` attribute
]
if <ast.BoolOp object at 0x7da2043450c0> begin[:]
<ast.Raise object at 0x7da20e9b19f0>
for taget[tuple[[<ast.Name object at 0x7da20e9b3d30>, <ast.Name object at 0x7da20e9b1930>]]] in starred[call[name[ATTR_TYPE_MAP].items, parameter[]]] begin[:]
if call[name[issubclass], parameter[call[name[type], parameter[name[value]]], name[typekey]]] begin[:]
return[call[name[func], parameter[name[value]]]]
return[name[value]] | keyword[def] identifier[_format_metadata_attribute] ( identifier[value] ):
literal[string]
keyword[if] ( identifier[value] keyword[is] keyword[None] keyword[or]
( identifier[isinstance] ( identifier[value] , identifier[Index] ) keyword[and] identifier[value] . identifier[regular] )):
keyword[raise] identifier[IgnoredAttribute]
keyword[for] identifier[typekey] , identifier[func] keyword[in] identifier[ATTR_TYPE_MAP] . identifier[items] ():
keyword[if] identifier[issubclass] ( identifier[type] ( identifier[value] ), identifier[typekey] ):
keyword[return] identifier[func] ( identifier[value] )
keyword[return] identifier[value] | def _format_metadata_attribute(value):
"""Format a value for writing to HDF5 as a `h5py.Dataset` attribute
"""
if value is None or (isinstance(value, Index) and value.regular):
raise IgnoredAttribute # depends on [control=['if'], data=[]]
# map type to something HDF5 can handle
for (typekey, func) in ATTR_TYPE_MAP.items():
if issubclass(type(value), typekey):
return func(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return value |
def create_default_links(self):
"""Create the default links between the IM and the device."""
# Group 0x00: IM as controller (0x40/0xe2) of this device, and this
# device as responder (0x41/0xa2) to the IM.
self._plm.manage_aldb_record(0x40, 0xe2, 0x00, self.address,
self.cat, self.subcat, self.product_key)
self.manage_aldb_record(0x41, 0xa2, 0x00, self._plm.address,
self._plm.cat, self._plm.subcat,
self._plm.product_key)
# One link pair per state/group the device exposes.
# NOTE(review): both self.address and self._address are used below —
# presumably equivalent accessors; confirm against the class definition.
for link in self._stateList:
state = self._stateList[link]
if state.is_responder:
# IM is controller
self._plm.manage_aldb_record(0x40, 0xe2, link, self._address,
0x00, 0x00, 0x00)
# Device is responder
self.manage_aldb_record(0x41, 0xa2, link, self._plm.address,
state.linkdata1, state.linkdata2,
state.linkdata3)
if state.is_controller:
# IM is responder
self._plm.manage_aldb_record(0x41, 0xa2, link, self._address,
0x00, 0x00, 0x00)
# Device is controller
self.manage_aldb_record(0x40, 0xe2, link, self._plm.address,
0x00, 0x00, 0x00)
# Re-read the device's ALDB so the local copy reflects the new records.
self.read_aldb() | def function[create_default_links, parameter[self]]:
constant[Create the default links between the IM and the device.]
call[name[self]._plm.manage_aldb_record, parameter[constant[64], constant[226], constant[0], name[self].address, name[self].cat, name[self].subcat, name[self].product_key]]
call[name[self].manage_aldb_record, parameter[constant[65], constant[162], constant[0], name[self]._plm.address, name[self]._plm.cat, name[self]._plm.subcat, name[self]._plm.product_key]]
for taget[name[link]] in starred[name[self]._stateList] begin[:]
variable[state] assign[=] call[name[self]._stateList][name[link]]
if name[state].is_responder begin[:]
call[name[self]._plm.manage_aldb_record, parameter[constant[64], constant[226], name[link], name[self]._address, constant[0], constant[0], constant[0]]]
call[name[self].manage_aldb_record, parameter[constant[65], constant[162], name[link], name[self]._plm.address, name[state].linkdata1, name[state].linkdata2, name[state].linkdata3]]
if name[state].is_controller begin[:]
call[name[self]._plm.manage_aldb_record, parameter[constant[65], constant[162], name[link], name[self]._address, constant[0], constant[0], constant[0]]]
call[name[self].manage_aldb_record, parameter[constant[64], constant[226], name[link], name[self]._plm.address, constant[0], constant[0], constant[0]]]
call[name[self].read_aldb, parameter[]] | keyword[def] identifier[create_default_links] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_plm] . identifier[manage_aldb_record] ( literal[int] , literal[int] , literal[int] , identifier[self] . identifier[address] ,
identifier[self] . identifier[cat] , identifier[self] . identifier[subcat] , identifier[self] . identifier[product_key] )
identifier[self] . identifier[manage_aldb_record] ( literal[int] , literal[int] , literal[int] , identifier[self] . identifier[_plm] . identifier[address] ,
identifier[self] . identifier[_plm] . identifier[cat] , identifier[self] . identifier[_plm] . identifier[subcat] ,
identifier[self] . identifier[_plm] . identifier[product_key] )
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[_stateList] :
identifier[state] = identifier[self] . identifier[_stateList] [ identifier[link] ]
keyword[if] identifier[state] . identifier[is_responder] :
identifier[self] . identifier[_plm] . identifier[manage_aldb_record] ( literal[int] , literal[int] , identifier[link] , identifier[self] . identifier[_address] ,
literal[int] , literal[int] , literal[int] )
identifier[self] . identifier[manage_aldb_record] ( literal[int] , literal[int] , identifier[link] , identifier[self] . identifier[_plm] . identifier[address] ,
identifier[state] . identifier[linkdata1] , identifier[state] . identifier[linkdata2] ,
identifier[state] . identifier[linkdata3] )
keyword[if] identifier[state] . identifier[is_controller] :
identifier[self] . identifier[_plm] . identifier[manage_aldb_record] ( literal[int] , literal[int] , identifier[link] , identifier[self] . identifier[_address] ,
literal[int] , literal[int] , literal[int] )
identifier[self] . identifier[manage_aldb_record] ( literal[int] , literal[int] , identifier[link] , identifier[self] . identifier[_plm] . identifier[address] ,
literal[int] , literal[int] , literal[int] )
identifier[self] . identifier[read_aldb] () | def create_default_links(self):
"""Create the default links between the IM and the device."""
self._plm.manage_aldb_record(64, 226, 0, self.address, self.cat, self.subcat, self.product_key)
self.manage_aldb_record(65, 162, 0, self._plm.address, self._plm.cat, self._plm.subcat, self._plm.product_key)
for link in self._stateList:
state = self._stateList[link]
if state.is_responder:
# IM is controller
self._plm.manage_aldb_record(64, 226, link, self._address, 0, 0, 0)
# Device is responder
self.manage_aldb_record(65, 162, link, self._plm.address, state.linkdata1, state.linkdata2, state.linkdata3) # depends on [control=['if'], data=[]]
if state.is_controller:
# IM is responder
self._plm.manage_aldb_record(65, 162, link, self._address, 0, 0, 0)
# Device is controller
self.manage_aldb_record(64, 226, link, self._plm.address, 0, 0, 0) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['link']]
self.read_aldb() |
def print_(
    self,
    x: int,
    y: int,
    string: str,
    bg_blend: int = tcod.constants.BKGND_DEFAULT,
    alignment: Optional[int] = None,
) -> None:
    """Print a color formatted string on a console.

    Args:
        x (int): The x coordinate from the left.
        y (int): The y coordinate from the top.
        string (str): A Unicode string optionally using color codes.
        bg_blend (int): Blending mode to use, defaults to BKGND_DEFAULT.
        alignment (Optional[int]): Text alignment; falls back to the
            console default when not given.

    .. deprecated:: 8.5
        Console methods relying on console defaults are deprecated.
        Use :any:`Console.print` instead; calling this method emits a
        warning naming the default values that must be made explicit.
    """
    # Warn about any implicit defaults still in use.
    self.__deprecate_defaults("print", bg_blend, alignment)
    if alignment is None:
        alignment = self.default_alignment
    lib.TCOD_console_printf_ex(self.console_c, x, y, bg_blend, alignment, _fmt(string))
def calculate_z1pt0(vs30):
    '''
    Convert vs30 values (in m/s) into the depth to the 1.0 km/s
    shear-wave velocity horizon (in m).

    Ref: Chiou & Youngs (2014) California model

    :param vs30: the shear wave velocity (in m/s) at a depth of 30 m
    '''
    # Model coefficients (fourth powers of the published constants).
    c1 = 571 ** 4.
    c2 = 1360.0 ** 4.
    # Smoothed ratio between the site velocity and the reference profile.
    velocity_ratio = (vs30 ** 4. + c1) / (c2 + c1)
    return numpy.exp((-7.15 / 4.0) * numpy.log(velocity_ratio))
def merge_dictionary(dst, src):
    """Recursively merge ``src`` into ``dst`` and return ``dst``.

    Unlike :meth:`dict.update` (which overwrites values at the root
    level), nested dictionaries are merged key by key.  Lists are merged
    element-wise: a falsy element in ``dst`` is replaced by the
    corresponding truthy element of ``src``, dicts nested inside lists
    are merged recursively, and any other truthy ``dst`` element wins.

    Note: This mutates and returns ``dst``.
    Copied from checkmate.utils.

    Bug fix: the original padded the shorter list by appending a single
    nested ``[None, ...]`` list, which left the lengths unequal and
    raised ``IndexError`` whenever the two lists differed in size.  The
    shorter copy is now padded with individual ``None`` elements.
    """
    stack = [(dst, src)]
    while stack:
        current_dst, current_src = stack.pop()
        for key in current_src:
            source = current_src[key]
            if key not in current_dst:
                current_dst[key] = source
                continue
            dest = current_dst[key]
            if isinstance(source, dict) and isinstance(dest, dict):
                # Defer the nested dict merge to a later iteration.
                stack.append((dest, source))
            elif isinstance(source, list) and isinstance(dest, list):
                r = dest[:]
                s = source[:]
                # Pad the shorter copy with None so indices line up.
                if len(r) > len(s):
                    s.extend([None] * (len(r) - len(s)))
                elif len(r) < len(s):
                    r.extend([None] * (len(s) - len(r)))
                # Merge the lists element by element.
                for index, value in enumerate(r):
                    if (not value) and s[index]:
                        r[index] = s[index]
                    elif isinstance(value, dict) and isinstance(s[index], dict):
                        stack.append((value, s[index]))
                    # Otherwise keep the existing truthy value from dest.
                    # (The original assigned dest[index] = s[index] here,
                    # but that write was discarded by the line below.)
                current_dst[key] = r
            else:
                current_dst[key] = source
    return dst
def cumsum(field, include_zero=False):
    ''' Create a ``DataSpec`` dict that generates a ``CumSum`` expression
    for a ``ColumnDataSource``.

    Examples:

        .. code-block:: python

            p.wedge(start_angle=cumsum('angle', include_zero=True),
                    end_angle=cumsum('angle'),
                    ...)

        generates ``CumSum`` expressions summing the ``"angle"`` column
        of a data source.  With ``include_zero=True`` (used for
        ``start_angle`` above) the running sums begin at zero; without
        it the sums start with the first value and include the last.
    '''
    # Build the expression object, then wrap it in a DataSpec dict.
    expression = CumSum(field=field, include_zero=include_zero)
    return expr(expression)
def create(Bucket,
           ACL=None, LocationConstraint=None,
           GrantFullControl=None,
           GrantRead=None,
           GrantReadACP=None,
           GrantWrite=None,
           GrantWriteACP=None,
           region=None, key=None, keyid=None, profile=None):
    '''
    Given a valid config, create an S3 Bucket.

    Returns {created: true} if the bucket was created and returns
    {created: False} if the bucket was not created.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_s3_bucket.create my_bucket \\
                         GrantFullControl='emailaddress=example@example.com' \\
                         GrantRead='uri="http://acs.amazonaws.com/groups/global/AllUsers"' \\
                         GrantReadACP='emailaddress="exampl@example.com",id="2345678909876432"' \\
                         LocationConstraint=us-west-1
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        # Explicit mapping instead of the fragile locals()[arg] lookup,
        # which silently breaks if a parameter is renamed.
        acl_grants = {
            'ACL': ACL,
            'GrantFullControl': GrantFullControl,
            'GrantRead': GrantRead,
            'GrantReadACP': GrantReadACP,
            'GrantWrite': GrantWrite,
            'GrantWriteACP': GrantWriteACP,
        }
        kwargs = {name: str(value) for name, value in acl_grants.items()  # future lint: disable=blacklisted-function
                  if value is not None}
        if LocationConstraint:
            kwargs['CreateBucketConfiguration'] = {'LocationConstraint': LocationConstraint}
        location = conn.create_bucket(Bucket=Bucket,
                                      **kwargs)
        # Block until S3 reports the bucket as existing.
        conn.get_waiter("bucket_exists").wait(Bucket=Bucket)
        if location:
            log.info('The newly created bucket name is located at %s', location['Location'])

            return {'created': True, 'name': Bucket, 'Location': location['Location']}
        else:
            log.warning('Bucket was not created')
            return {'created': False}
    except ClientError as e:
        return {'created': False, 'error': __utils__['boto3.get_error'](e)}
def cli(ctx, ids, query, filters, details, interval):
    """Watch for new alerts."""
    display = 'details' if details else 'compact'
    from_date = None
    keep_refreshing = True
    # Poll repeatedly until the query command reports it is done.
    while keep_refreshing:
        try:
            keep_refreshing, from_date = ctx.invoke(
                query_cmd, ids=ids, query=query, filters=filters,
                display=display, from_date=from_date)
            time.sleep(interval)
        except (KeyboardInterrupt, SystemExit) as e:
            sys.exit(e)
def mutable(obj):
    '''
    return a mutable proxy for the `obj`.

    all modify on the proxy will not apply on origin object.
    '''
    origin_cls = type(obj)

    class _MutableProxy(origin_cls):
        def __getattribute__(self, name):
            # Prefer attributes set on the proxy itself; fall back to
            # reading (but never writing) the wrapped original object.
            try:
                return super().__getattribute__(name)
            except AttributeError:
                return getattr(obj, name)

    # Make the proxy class look like the original class.
    update_wrapper(_MutableProxy, origin_cls, updated=())
    return _MutableProxy()
def get_parent_gradebook_nodes(self):
    """Gets the parents of this gradebook.

    return: (osid.grading.GradebookNodeList) - the parents of the
            ``id``
    *compliance: mandatory -- This method must be implemented.*

    """
    # Wrap each stored parent node map in a GradebookNode object.
    parents = [
        GradebookNode(
            node._my_map,
            runtime=self._runtime,
            proxy=self._proxy,
            lookup_session=self._lookup_session)
        for node in self._my_map['parentNodes']
    ]
    return GradebookNodeList(parents)
def from_file(self, filename, table=None, delimiter='|', null='NULL',
panic=True, quotechar='"', parse_dates=False):
"""
Load from a file into the target table, handling each step of the
load process.
Can load from text files, and properly formatted giraffez archive
files. In both cases, if Gzip compression is detected the file will be
decompressed while reading and handled appropriately. The encoding is
determined automatically by the contents of the file.
It is not necessary to set the columns in use prior to loading from a file.
In the case of a text file, the header is used to determine column names
and their order. Valid delimiters include '|', ',', and '\\t' (tab). When
loading an archive file, the column information is decoded alongside the data.
:param str filename: The location of the file to be loaded
:param str table: The name of the target table, if it was not specified
to the constructor for the isntance
:param str null: The string that indicates a null value in the rows being
inserted from a file. Defaults to 'NULL'
:param str delimiter: When loading a file, indicates that fields are
separated by this delimiter. Defaults to :code:`None`, which causes the
delimiter to be determined from the header of the file. In most
cases, this behavior is sufficient
:param str quotechar: The character used to quote fields containing special characters,
like the delimiter.
:param bool panic: If :code:`True`, when an error is encountered it will be
raised. Otherwise, the error will be logged and :code:`self.error_count`
is incremented.
:return: The output of the call to
:meth:`~giraffez.load.TeradataBulkLoad.finish`
:raises `giraffez.errors.GiraffeError`: if table was not set and :code:`table`
is :code:`None`, or if a Teradata error ocurred while retrieving table info.
:raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is :code:`True` and there
are format errors in the row values.
"""
if not self.table:
if not table:
raise GiraffeError("Table must be set or specified to load a file.")
self.table = table
if not isinstance(null, basestring):
raise GiraffeError("Expected 'null' to be str, received {}".format(type(null)))
with Reader(filename, delimiter=delimiter, quotechar=quotechar) as f:
if not isinstance(f.delimiter, basestring):
raise GiraffeError("Expected 'delimiter' to be str, received {}".format(type(delimiter)))
self.columns = f.header
if isinstance(f, ArchiveFileReader):
self.mload.set_encoding(ROW_ENCODING_RAW)
self.preprocessor = lambda s: s
if parse_dates:
self.preprocessor = DateHandler(self.columns)
self._initiate()
self.mload.set_null(null)
self.mload.set_delimiter(delimiter)
i = 0
for i, line in enumerate(f, 1):
self.put(line, panic=panic)
if i % self.checkpoint_interval == 1:
log.info("\rBulkLoad", "Processed {} rows".format(i), console=True)
checkpoint_status = self.checkpoint()
self.exit_code = self._exit_code()
if self.exit_code != 0:
return self.exit_code
log.info("\rBulkLoad", "Processed {} rows".format(i))
return self.finish() | def function[from_file, parameter[self, filename, table, delimiter, null, panic, quotechar, parse_dates]]:
constant[
Load from a file into the target table, handling each step of the
load process.
Can load from text files, and properly formatted giraffez archive
files. In both cases, if Gzip compression is detected the file will be
decompressed while reading and handled appropriately. The encoding is
determined automatically by the contents of the file.
It is not necessary to set the columns in use prior to loading from a file.
In the case of a text file, the header is used to determine column names
and their order. Valid delimiters include '|', ',', and '\t' (tab). When
loading an archive file, the column information is decoded alongside the data.
:param str filename: The location of the file to be loaded
:param str table: The name of the target table, if it was not specified
to the constructor for the isntance
:param str null: The string that indicates a null value in the rows being
inserted from a file. Defaults to 'NULL'
:param str delimiter: When loading a file, indicates that fields are
separated by this delimiter. Defaults to :code:`None`, which causes the
delimiter to be determined from the header of the file. In most
cases, this behavior is sufficient
:param str quotechar: The character used to quote fields containing special characters,
like the delimiter.
:param bool panic: If :code:`True`, when an error is encountered it will be
raised. Otherwise, the error will be logged and :code:`self.error_count`
is incremented.
:return: The output of the call to
:meth:`~giraffez.load.TeradataBulkLoad.finish`
:raises `giraffez.errors.GiraffeError`: if table was not set and :code:`table`
is :code:`None`, or if a Teradata error ocurred while retrieving table info.
:raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is :code:`True` and there
are format errors in the row values.
]
if <ast.UnaryOp object at 0x7da18eb56da0> begin[:]
if <ast.UnaryOp object at 0x7da18eb54760> begin[:]
<ast.Raise object at 0x7da18eb57cd0>
name[self].table assign[=] name[table]
if <ast.UnaryOp object at 0x7da18eb56f20> begin[:]
<ast.Raise object at 0x7da18eb56bc0>
with call[name[Reader], parameter[name[filename]]] begin[:]
if <ast.UnaryOp object at 0x7da18eb560b0> begin[:]
<ast.Raise object at 0x7da18eb56e90>
name[self].columns assign[=] name[f].header
if call[name[isinstance], parameter[name[f], name[ArchiveFileReader]]] begin[:]
call[name[self].mload.set_encoding, parameter[name[ROW_ENCODING_RAW]]]
name[self].preprocessor assign[=] <ast.Lambda object at 0x7da1b02c10c0>
if name[parse_dates] begin[:]
name[self].preprocessor assign[=] call[name[DateHandler], parameter[name[self].columns]]
call[name[self]._initiate, parameter[]]
call[name[self].mload.set_null, parameter[name[null]]]
call[name[self].mload.set_delimiter, parameter[name[delimiter]]]
variable[i] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b02c1480>, <ast.Name object at 0x7da1b02c3490>]]] in starred[call[name[enumerate], parameter[name[f], constant[1]]]] begin[:]
call[name[self].put, parameter[name[line]]]
if compare[binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> name[self].checkpoint_interval] equal[==] constant[1]] begin[:]
call[name[log].info, parameter[constant[
BulkLoad], call[constant[Processed {} rows].format, parameter[name[i]]]]]
variable[checkpoint_status] assign[=] call[name[self].checkpoint, parameter[]]
name[self].exit_code assign[=] call[name[self]._exit_code, parameter[]]
if compare[name[self].exit_code not_equal[!=] constant[0]] begin[:]
return[name[self].exit_code]
call[name[log].info, parameter[constant[
BulkLoad], call[constant[Processed {} rows].format, parameter[name[i]]]]]
return[call[name[self].finish, parameter[]]] | keyword[def] identifier[from_file] ( identifier[self] , identifier[filename] , identifier[table] = keyword[None] , identifier[delimiter] = literal[string] , identifier[null] = literal[string] ,
identifier[panic] = keyword[True] , identifier[quotechar] = literal[string] , identifier[parse_dates] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[table] :
keyword[if] keyword[not] identifier[table] :
keyword[raise] identifier[GiraffeError] ( literal[string] )
identifier[self] . identifier[table] = identifier[table]
keyword[if] keyword[not] identifier[isinstance] ( identifier[null] , identifier[basestring] ):
keyword[raise] identifier[GiraffeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[null] )))
keyword[with] identifier[Reader] ( identifier[filename] , identifier[delimiter] = identifier[delimiter] , identifier[quotechar] = identifier[quotechar] ) keyword[as] identifier[f] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[f] . identifier[delimiter] , identifier[basestring] ):
keyword[raise] identifier[GiraffeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[delimiter] )))
identifier[self] . identifier[columns] = identifier[f] . identifier[header]
keyword[if] identifier[isinstance] ( identifier[f] , identifier[ArchiveFileReader] ):
identifier[self] . identifier[mload] . identifier[set_encoding] ( identifier[ROW_ENCODING_RAW] )
identifier[self] . identifier[preprocessor] = keyword[lambda] identifier[s] : identifier[s]
keyword[if] identifier[parse_dates] :
identifier[self] . identifier[preprocessor] = identifier[DateHandler] ( identifier[self] . identifier[columns] )
identifier[self] . identifier[_initiate] ()
identifier[self] . identifier[mload] . identifier[set_null] ( identifier[null] )
identifier[self] . identifier[mload] . identifier[set_delimiter] ( identifier[delimiter] )
identifier[i] = literal[int]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[f] , literal[int] ):
identifier[self] . identifier[put] ( identifier[line] , identifier[panic] = identifier[panic] )
keyword[if] identifier[i] % identifier[self] . identifier[checkpoint_interval] == literal[int] :
identifier[log] . identifier[info] ( literal[string] , literal[string] . identifier[format] ( identifier[i] ), identifier[console] = keyword[True] )
identifier[checkpoint_status] = identifier[self] . identifier[checkpoint] ()
identifier[self] . identifier[exit_code] = identifier[self] . identifier[_exit_code] ()
keyword[if] identifier[self] . identifier[exit_code] != literal[int] :
keyword[return] identifier[self] . identifier[exit_code]
identifier[log] . identifier[info] ( literal[string] , literal[string] . identifier[format] ( identifier[i] ))
keyword[return] identifier[self] . identifier[finish] () | def from_file(self, filename, table=None, delimiter='|', null='NULL', panic=True, quotechar='"', parse_dates=False):
"""
Load from a file into the target table, handling each step of the
load process.
Can load from text files, and properly formatted giraffez archive
files. In both cases, if Gzip compression is detected the file will be
decompressed while reading and handled appropriately. The encoding is
determined automatically by the contents of the file.
It is not necessary to set the columns in use prior to loading from a file.
In the case of a text file, the header is used to determine column names
and their order. Valid delimiters include '|', ',', and '\\t' (tab). When
loading an archive file, the column information is decoded alongside the data.
:param str filename: The location of the file to be loaded
:param str table: The name of the target table, if it was not specified
to the constructor for the isntance
:param str null: The string that indicates a null value in the rows being
inserted from a file. Defaults to 'NULL'
:param str delimiter: When loading a file, indicates that fields are
separated by this delimiter. Defaults to :code:`None`, which causes the
delimiter to be determined from the header of the file. In most
cases, this behavior is sufficient
:param str quotechar: The character used to quote fields containing special characters,
like the delimiter.
:param bool panic: If :code:`True`, when an error is encountered it will be
raised. Otherwise, the error will be logged and :code:`self.error_count`
is incremented.
:return: The output of the call to
:meth:`~giraffez.load.TeradataBulkLoad.finish`
:raises `giraffez.errors.GiraffeError`: if table was not set and :code:`table`
is :code:`None`, or if a Teradata error ocurred while retrieving table info.
:raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is :code:`True` and there
are format errors in the row values.
"""
if not self.table:
if not table:
raise GiraffeError('Table must be set or specified to load a file.') # depends on [control=['if'], data=[]]
self.table = table # depends on [control=['if'], data=[]]
if not isinstance(null, basestring):
raise GiraffeError("Expected 'null' to be str, received {}".format(type(null))) # depends on [control=['if'], data=[]]
with Reader(filename, delimiter=delimiter, quotechar=quotechar) as f:
if not isinstance(f.delimiter, basestring):
raise GiraffeError("Expected 'delimiter' to be str, received {}".format(type(delimiter))) # depends on [control=['if'], data=[]]
self.columns = f.header
if isinstance(f, ArchiveFileReader):
self.mload.set_encoding(ROW_ENCODING_RAW)
self.preprocessor = lambda s: s # depends on [control=['if'], data=[]]
if parse_dates:
self.preprocessor = DateHandler(self.columns) # depends on [control=['if'], data=[]]
self._initiate()
self.mload.set_null(null)
self.mload.set_delimiter(delimiter)
i = 0
for (i, line) in enumerate(f, 1):
self.put(line, panic=panic)
if i % self.checkpoint_interval == 1:
log.info('\rBulkLoad', 'Processed {} rows'.format(i), console=True)
checkpoint_status = self.checkpoint()
self.exit_code = self._exit_code()
if self.exit_code != 0:
return self.exit_code # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
log.info('\rBulkLoad', 'Processed {} rows'.format(i))
return self.finish() # depends on [control=['with'], data=['f']] |
def from_polar(r, theta, phi):
"""Convert ``(r, theta, phi)`` to Cartesian coordinates ``[x y z]``.
``r`` - vector length
``theta`` - angle above (+) or below (-) the xy-plane
``phi`` - angle around the z-axis
The meaning and order of the three polar parameters is designed to
match both ISO 31-11 and the traditional order used by physicists.
Mathematicians usually define ``theta`` and ``phi`` the other way
around, and may need to use caution when calling this function.
See: https://en.wikipedia.org/wiki/Spherical_coordinate_system
"""
rxy = r * cos(theta)
return array((rxy * cos(phi), rxy * sin(phi), r * sin(theta))) | def function[from_polar, parameter[r, theta, phi]]:
constant[Convert ``(r, theta, phi)`` to Cartesian coordinates ``[x y z]``.
``r`` - vector length
``theta`` - angle above (+) or below (-) the xy-plane
``phi`` - angle around the z-axis
The meaning and order of the three polar parameters is designed to
match both ISO 31-11 and the traditional order used by physicists.
Mathematicians usually define ``theta`` and ``phi`` the other way
around, and may need to use caution when calling this function.
See: https://en.wikipedia.org/wiki/Spherical_coordinate_system
]
variable[rxy] assign[=] binary_operation[name[r] * call[name[cos], parameter[name[theta]]]]
return[call[name[array], parameter[tuple[[<ast.BinOp object at 0x7da1b179aa40>, <ast.BinOp object at 0x7da1b17987c0>, <ast.BinOp object at 0x7da1b179b9d0>]]]]] | keyword[def] identifier[from_polar] ( identifier[r] , identifier[theta] , identifier[phi] ):
literal[string]
identifier[rxy] = identifier[r] * identifier[cos] ( identifier[theta] )
keyword[return] identifier[array] (( identifier[rxy] * identifier[cos] ( identifier[phi] ), identifier[rxy] * identifier[sin] ( identifier[phi] ), identifier[r] * identifier[sin] ( identifier[theta] ))) | def from_polar(r, theta, phi):
"""Convert ``(r, theta, phi)`` to Cartesian coordinates ``[x y z]``.
``r`` - vector length
``theta`` - angle above (+) or below (-) the xy-plane
``phi`` - angle around the z-axis
The meaning and order of the three polar parameters is designed to
match both ISO 31-11 and the traditional order used by physicists.
Mathematicians usually define ``theta`` and ``phi`` the other way
around, and may need to use caution when calling this function.
See: https://en.wikipedia.org/wiki/Spherical_coordinate_system
"""
rxy = r * cos(theta)
return array((rxy * cos(phi), rxy * sin(phi), r * sin(theta))) |
def link_contentkey_authorization_policy(access_token, ckap_id, options_id, \
ams_redirected_rest_endpoint):
'''Link Media Service Content Key Authorization Policy.
Args:
access_token (str): A valid Azure authentication token.
ckap_id (str): A Media Service Asset Content Key Authorization Policy ID.
options_id (str): A Media Service Content Key Authorization Policy Options .
ams_redirected_rest_endpoint (str): A Media Service Redirected Endpoint.
Returns:
HTTP response. JSON body.
'''
path = '/ContentKeyAuthorizationPolicies'
full_path = ''.join([path, "('", ckap_id, "')", "/$links/Options"])
full_path_encoded = urllib.parse.quote(full_path, safe='')
endpoint = ''.join([ams_rest_endpoint, full_path_encoded])
uri = ''.join([ams_redirected_rest_endpoint, 'ContentKeyAuthorizationPolicyOptions', \
"('", options_id, "')"])
body = '{"uri": "' + uri + '"}'
return do_ams_post(endpoint, full_path_encoded, body, access_token, "json_only", "1.0;NetFx") | def function[link_contentkey_authorization_policy, parameter[access_token, ckap_id, options_id, ams_redirected_rest_endpoint]]:
constant[Link Media Service Content Key Authorization Policy.
Args:
access_token (str): A valid Azure authentication token.
ckap_id (str): A Media Service Asset Content Key Authorization Policy ID.
options_id (str): A Media Service Content Key Authorization Policy Options .
ams_redirected_rest_endpoint (str): A Media Service Redirected Endpoint.
Returns:
HTTP response. JSON body.
]
variable[path] assign[=] constant[/ContentKeyAuthorizationPolicies]
variable[full_path] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b055e080>, <ast.Constant object at 0x7da1b055e2c0>, <ast.Name object at 0x7da1b055dae0>, <ast.Constant object at 0x7da1b055de70>, <ast.Constant object at 0x7da1b055e7a0>]]]]
variable[full_path_encoded] assign[=] call[name[urllib].parse.quote, parameter[name[full_path]]]
variable[endpoint] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b055e020>, <ast.Name object at 0x7da1b055d4b0>]]]]
variable[uri] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b04ca290>, <ast.Constant object at 0x7da1b04ca7a0>, <ast.Constant object at 0x7da1b04ca020>, <ast.Name object at 0x7da1b04c9630>, <ast.Constant object at 0x7da1b04cb250>]]]]
variable[body] assign[=] binary_operation[binary_operation[constant[{"uri": "] + name[uri]] + constant["}]]
return[call[name[do_ams_post], parameter[name[endpoint], name[full_path_encoded], name[body], name[access_token], constant[json_only], constant[1.0;NetFx]]]] | keyword[def] identifier[link_contentkey_authorization_policy] ( identifier[access_token] , identifier[ckap_id] , identifier[options_id] , identifier[ams_redirected_rest_endpoint] ):
literal[string]
identifier[path] = literal[string]
identifier[full_path] = literal[string] . identifier[join] ([ identifier[path] , literal[string] , identifier[ckap_id] , literal[string] , literal[string] ])
identifier[full_path_encoded] = identifier[urllib] . identifier[parse] . identifier[quote] ( identifier[full_path] , identifier[safe] = literal[string] )
identifier[endpoint] = literal[string] . identifier[join] ([ identifier[ams_rest_endpoint] , identifier[full_path_encoded] ])
identifier[uri] = literal[string] . identifier[join] ([ identifier[ams_redirected_rest_endpoint] , literal[string] , literal[string] , identifier[options_id] , literal[string] ])
identifier[body] = literal[string] + identifier[uri] + literal[string]
keyword[return] identifier[do_ams_post] ( identifier[endpoint] , identifier[full_path_encoded] , identifier[body] , identifier[access_token] , literal[string] , literal[string] ) | def link_contentkey_authorization_policy(access_token, ckap_id, options_id, ams_redirected_rest_endpoint):
"""Link Media Service Content Key Authorization Policy.
Args:
access_token (str): A valid Azure authentication token.
ckap_id (str): A Media Service Asset Content Key Authorization Policy ID.
options_id (str): A Media Service Content Key Authorization Policy Options .
ams_redirected_rest_endpoint (str): A Media Service Redirected Endpoint.
Returns:
HTTP response. JSON body.
"""
path = '/ContentKeyAuthorizationPolicies'
full_path = ''.join([path, "('", ckap_id, "')", '/$links/Options'])
full_path_encoded = urllib.parse.quote(full_path, safe='')
endpoint = ''.join([ams_rest_endpoint, full_path_encoded])
uri = ''.join([ams_redirected_rest_endpoint, 'ContentKeyAuthorizationPolicyOptions', "('", options_id, "')"])
body = '{"uri": "' + uri + '"}'
return do_ams_post(endpoint, full_path_encoded, body, access_token, 'json_only', '1.0;NetFx') |
def neighborhood_cortical_magnification(mesh, coordinates):
'''
neighborhood_cortical_magnification(mesh, visual_coordinates) yields a list of neighborhood-
based cortical magnification values for the vertices in the given mesh if their visual field
coordinates are given by the visual_coordinates matrix (must be like [x_values, y_values]). If
either x-value or y-value of a coordinate is either None or numpy.nan, then that cortical
magnification value is numpy.nan.
'''
idcs = _cmag_coord_idcs(coordinates)
neis = mesh.tess.indexed_neighborhoods
coords_vis = np.asarray(coordinates if len(coordinates) == 2 else coordinates.T)
coords_srf = mesh.coordinates
res = np.full((mesh.vertex_count, 3), np.nan, dtype=np.float)
res = np.array([row for row in [(np.nan,np.nan,np.nan)] for _ in range(mesh.vertex_count)],
dtype=np.float)
for idx in idcs:
nei = neis[idx]
pts_vis = coords_vis[:,nei]
pts_srf = coords_srf[:,nei]
x0_vis = coords_vis[:,idx]
x0_srf = coords_srf[:,idx]
if any(u is None or np.isnan(u) for pt in pts_vis for u in pt): continue
# find tangential, radial, and areal magnifications
x0col_vis = np.asarray([x0_vis]).T
x0col_srf = np.asarray([x0_srf]).T
# areal is easy
voronoi_vis = (pts_vis - x0col_vis) * 0.5 + x0col_vis
voronoi_srf = (pts_srf - x0col_srf) * 0.5 + x0col_srf
area_vis = np.sum([geo.triangle_area(x0_vis, a, b)
for (a,b) in zip(voronoi_vis.T, np.roll(voronoi_vis, 1, axis=1).T)])
area_srf = np.sum([geo.triangle_area(x0_srf, a, b)
for (a,b) in zip(voronoi_srf.T, np.roll(voronoi_srf, 1, axis=1).T)])
res[idx,2] = np.inf if np.isclose(area_vis, 0) else area_srf/area_vis
# radial and tangentual we do together because they are very similar:
# find the intersection lines then add up their distances along the cortex
pts_vis = voronoi_vis
pts_srf = voronoi_srf
segs_srf = (pts_srf, np.roll(pts_srf, -1, axis=1))
segs_vis = (pts_vis, np.roll(pts_vis, -1, axis=1))
segs_vis_t = np.transpose(segs_vis, (2,0,1))
segs_srf_t = np.transpose(segs_srf, (2,0,1))
x0norm_vis = npla.norm(x0_vis)
if not np.isclose(x0norm_vis, 0):
dirvecs = x0_vis / x0norm_vis
dirvecs = np.asarray([dirvecs, [-dirvecs[1], dirvecs[0]]])
for dirno in [0,1]:
dirvec = dirvecs[dirno]
line = (x0_vis, x0_vis + dirvec)
try:
isects_vis = np.asarray(geo.line_segment_intersection_2D(line, segs_vis))
# okay, these will all be nan but two of them; they are the points we care about
isect_idcs = np.unique(np.where(np.logical_not(np.isnan(isects_vis)))[1])
except Exception:
isect_idcs = []
if len(isect_idcs) != 2:
res[idx,dirno] = np.nan
continue
isects_vis = isects_vis[:,isect_idcs].T
# we need the distance in visual space
len_vis = npla.norm(isects_vis[0] - isects_vis[1])
if np.isclose(len_vis, 0): res[idx,dirno] = np.inf
else:
# we also need the distances on the surface: find the points by projection
fsegs_srf = segs_srf_t[isect_idcs]
fsegs_vis = segs_vis_t[isect_idcs]
s02lens_vis = npla.norm(fsegs_vis[:,0] - fsegs_vis[:,1], axis=1)
s01lens_vis = npla.norm(fsegs_vis[:,0] - isects_vis, axis=1)
vecs_srf = fsegs_srf[:,1] - fsegs_srf[:,0]
s02lens_srf = npla.norm(vecs_srf, axis=1)
isects_srf = np.transpose([(s01lens_vis/s02lens_vis)]) * vecs_srf \
+ fsegs_srf[:,0]
len_srf = np.sum(npla.norm(isects_srf - x0_srf, axis=1))
res[idx,dirno] = len_srf / len_vis
# That's it!
return res | def function[neighborhood_cortical_magnification, parameter[mesh, coordinates]]:
constant[
neighborhood_cortical_magnification(mesh, visual_coordinates) yields a list of neighborhood-
based cortical magnification values for the vertices in the given mesh if their visual field
coordinates are given by the visual_coordinates matrix (must be like [x_values, y_values]). If
either x-value or y-value of a coordinate is either None or numpy.nan, then that cortical
magnification value is numpy.nan.
]
variable[idcs] assign[=] call[name[_cmag_coord_idcs], parameter[name[coordinates]]]
variable[neis] assign[=] name[mesh].tess.indexed_neighborhoods
variable[coords_vis] assign[=] call[name[np].asarray, parameter[<ast.IfExp object at 0x7da18f09ceb0>]]
variable[coords_srf] assign[=] name[mesh].coordinates
variable[res] assign[=] call[name[np].full, parameter[tuple[[<ast.Attribute object at 0x7da18f09e500>, <ast.Constant object at 0x7da18f09f130>]], name[np].nan]]
variable[res] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da18f09c160>]]
for taget[name[idx]] in starred[name[idcs]] begin[:]
variable[nei] assign[=] call[name[neis]][name[idx]]
variable[pts_vis] assign[=] call[name[coords_vis]][tuple[[<ast.Slice object at 0x7da18f09f9d0>, <ast.Name object at 0x7da18f09dae0>]]]
variable[pts_srf] assign[=] call[name[coords_srf]][tuple[[<ast.Slice object at 0x7da18f09e740>, <ast.Name object at 0x7da18f09cc10>]]]
variable[x0_vis] assign[=] call[name[coords_vis]][tuple[[<ast.Slice object at 0x7da18f09cf10>, <ast.Name object at 0x7da18f09ce80>]]]
variable[x0_srf] assign[=] call[name[coords_srf]][tuple[[<ast.Slice object at 0x7da18f09ece0>, <ast.Name object at 0x7da18f09d330>]]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18f09d6c0>]] begin[:]
continue
variable[x0col_vis] assign[=] call[name[np].asarray, parameter[list[[<ast.Name object at 0x7da1b0e39840>]]]].T
variable[x0col_srf] assign[=] call[name[np].asarray, parameter[list[[<ast.Name object at 0x7da1b0e3a980>]]]].T
variable[voronoi_vis] assign[=] binary_operation[binary_operation[binary_operation[name[pts_vis] - name[x0col_vis]] * constant[0.5]] + name[x0col_vis]]
variable[voronoi_srf] assign[=] binary_operation[binary_operation[binary_operation[name[pts_srf] - name[x0col_srf]] * constant[0.5]] + name[x0col_srf]]
variable[area_vis] assign[=] call[name[np].sum, parameter[<ast.ListComp object at 0x7da1b0e38e50>]]
variable[area_srf] assign[=] call[name[np].sum, parameter[<ast.ListComp object at 0x7da1b0e38760>]]
call[name[res]][tuple[[<ast.Name object at 0x7da1b0e3a050>, <ast.Constant object at 0x7da1b0e38430>]]] assign[=] <ast.IfExp object at 0x7da1b0e39990>
variable[pts_vis] assign[=] name[voronoi_vis]
variable[pts_srf] assign[=] name[voronoi_srf]
variable[segs_srf] assign[=] tuple[[<ast.Name object at 0x7da1b0e3a9e0>, <ast.Call object at 0x7da1b0e3b7f0>]]
variable[segs_vis] assign[=] tuple[[<ast.Name object at 0x7da1b0e39de0>, <ast.Call object at 0x7da1b0e3bfd0>]]
variable[segs_vis_t] assign[=] call[name[np].transpose, parameter[name[segs_vis], tuple[[<ast.Constant object at 0x7da1b0e39fc0>, <ast.Constant object at 0x7da1b0e3b220>, <ast.Constant object at 0x7da1b0e3ba90>]]]]
variable[segs_srf_t] assign[=] call[name[np].transpose, parameter[name[segs_srf], tuple[[<ast.Constant object at 0x7da1b0e38970>, <ast.Constant object at 0x7da1b0e3b9a0>, <ast.Constant object at 0x7da1b0e3aec0>]]]]
variable[x0norm_vis] assign[=] call[name[npla].norm, parameter[name[x0_vis]]]
if <ast.UnaryOp object at 0x7da1b0e3bd30> begin[:]
variable[dirvecs] assign[=] binary_operation[name[x0_vis] / name[x0norm_vis]]
variable[dirvecs] assign[=] call[name[np].asarray, parameter[list[[<ast.Name object at 0x7da1b0e38940>, <ast.List object at 0x7da1b0e3be20>]]]]
for taget[name[dirno]] in starred[list[[<ast.Constant object at 0x7da1b0e39300>, <ast.Constant object at 0x7da1b0e382e0>]]] begin[:]
variable[dirvec] assign[=] call[name[dirvecs]][name[dirno]]
variable[line] assign[=] tuple[[<ast.Name object at 0x7da1b0e3bd00>, <ast.BinOp object at 0x7da1b0e39390>]]
<ast.Try object at 0x7da1b0e397b0>
if compare[call[name[len], parameter[name[isect_idcs]]] not_equal[!=] constant[2]] begin[:]
call[name[res]][tuple[[<ast.Name object at 0x7da1b0e381f0>, <ast.Name object at 0x7da1b0e398a0>]]] assign[=] name[np].nan
continue
variable[isects_vis] assign[=] call[name[isects_vis]][tuple[[<ast.Slice object at 0x7da1b0ebc6a0>, <ast.Name object at 0x7da1b0ebc130>]]].T
variable[len_vis] assign[=] call[name[npla].norm, parameter[binary_operation[call[name[isects_vis]][constant[0]] - call[name[isects_vis]][constant[1]]]]]
if call[name[np].isclose, parameter[name[len_vis], constant[0]]] begin[:]
call[name[res]][tuple[[<ast.Name object at 0x7da1b0ebc280>, <ast.Name object at 0x7da1b0ebcee0>]]] assign[=] name[np].inf
return[name[res]] | keyword[def] identifier[neighborhood_cortical_magnification] ( identifier[mesh] , identifier[coordinates] ):
literal[string]
identifier[idcs] = identifier[_cmag_coord_idcs] ( identifier[coordinates] )
identifier[neis] = identifier[mesh] . identifier[tess] . identifier[indexed_neighborhoods]
identifier[coords_vis] = identifier[np] . identifier[asarray] ( identifier[coordinates] keyword[if] identifier[len] ( identifier[coordinates] )== literal[int] keyword[else] identifier[coordinates] . identifier[T] )
identifier[coords_srf] = identifier[mesh] . identifier[coordinates]
identifier[res] = identifier[np] . identifier[full] (( identifier[mesh] . identifier[vertex_count] , literal[int] ), identifier[np] . identifier[nan] , identifier[dtype] = identifier[np] . identifier[float] )
identifier[res] = identifier[np] . identifier[array] ([ identifier[row] keyword[for] identifier[row] keyword[in] [( identifier[np] . identifier[nan] , identifier[np] . identifier[nan] , identifier[np] . identifier[nan] )] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[mesh] . identifier[vertex_count] )],
identifier[dtype] = identifier[np] . identifier[float] )
keyword[for] identifier[idx] keyword[in] identifier[idcs] :
identifier[nei] = identifier[neis] [ identifier[idx] ]
identifier[pts_vis] = identifier[coords_vis] [:, identifier[nei] ]
identifier[pts_srf] = identifier[coords_srf] [:, identifier[nei] ]
identifier[x0_vis] = identifier[coords_vis] [:, identifier[idx] ]
identifier[x0_srf] = identifier[coords_srf] [:, identifier[idx] ]
keyword[if] identifier[any] ( identifier[u] keyword[is] keyword[None] keyword[or] identifier[np] . identifier[isnan] ( identifier[u] ) keyword[for] identifier[pt] keyword[in] identifier[pts_vis] keyword[for] identifier[u] keyword[in] identifier[pt] ): keyword[continue]
identifier[x0col_vis] = identifier[np] . identifier[asarray] ([ identifier[x0_vis] ]). identifier[T]
identifier[x0col_srf] = identifier[np] . identifier[asarray] ([ identifier[x0_srf] ]). identifier[T]
identifier[voronoi_vis] =( identifier[pts_vis] - identifier[x0col_vis] )* literal[int] + identifier[x0col_vis]
identifier[voronoi_srf] =( identifier[pts_srf] - identifier[x0col_srf] )* literal[int] + identifier[x0col_srf]
identifier[area_vis] = identifier[np] . identifier[sum] ([ identifier[geo] . identifier[triangle_area] ( identifier[x0_vis] , identifier[a] , identifier[b] )
keyword[for] ( identifier[a] , identifier[b] ) keyword[in] identifier[zip] ( identifier[voronoi_vis] . identifier[T] , identifier[np] . identifier[roll] ( identifier[voronoi_vis] , literal[int] , identifier[axis] = literal[int] ). identifier[T] )])
identifier[area_srf] = identifier[np] . identifier[sum] ([ identifier[geo] . identifier[triangle_area] ( identifier[x0_srf] , identifier[a] , identifier[b] )
keyword[for] ( identifier[a] , identifier[b] ) keyword[in] identifier[zip] ( identifier[voronoi_srf] . identifier[T] , identifier[np] . identifier[roll] ( identifier[voronoi_srf] , literal[int] , identifier[axis] = literal[int] ). identifier[T] )])
identifier[res] [ identifier[idx] , literal[int] ]= identifier[np] . identifier[inf] keyword[if] identifier[np] . identifier[isclose] ( identifier[area_vis] , literal[int] ) keyword[else] identifier[area_srf] / identifier[area_vis]
identifier[pts_vis] = identifier[voronoi_vis]
identifier[pts_srf] = identifier[voronoi_srf]
identifier[segs_srf] =( identifier[pts_srf] , identifier[np] . identifier[roll] ( identifier[pts_srf] ,- literal[int] , identifier[axis] = literal[int] ))
identifier[segs_vis] =( identifier[pts_vis] , identifier[np] . identifier[roll] ( identifier[pts_vis] ,- literal[int] , identifier[axis] = literal[int] ))
identifier[segs_vis_t] = identifier[np] . identifier[transpose] ( identifier[segs_vis] ,( literal[int] , literal[int] , literal[int] ))
identifier[segs_srf_t] = identifier[np] . identifier[transpose] ( identifier[segs_srf] ,( literal[int] , literal[int] , literal[int] ))
identifier[x0norm_vis] = identifier[npla] . identifier[norm] ( identifier[x0_vis] )
keyword[if] keyword[not] identifier[np] . identifier[isclose] ( identifier[x0norm_vis] , literal[int] ):
identifier[dirvecs] = identifier[x0_vis] / identifier[x0norm_vis]
identifier[dirvecs] = identifier[np] . identifier[asarray] ([ identifier[dirvecs] ,[- identifier[dirvecs] [ literal[int] ], identifier[dirvecs] [ literal[int] ]]])
keyword[for] identifier[dirno] keyword[in] [ literal[int] , literal[int] ]:
identifier[dirvec] = identifier[dirvecs] [ identifier[dirno] ]
identifier[line] =( identifier[x0_vis] , identifier[x0_vis] + identifier[dirvec] )
keyword[try] :
identifier[isects_vis] = identifier[np] . identifier[asarray] ( identifier[geo] . identifier[line_segment_intersection_2D] ( identifier[line] , identifier[segs_vis] ))
identifier[isect_idcs] = identifier[np] . identifier[unique] ( identifier[np] . identifier[where] ( identifier[np] . identifier[logical_not] ( identifier[np] . identifier[isnan] ( identifier[isects_vis] )))[ literal[int] ])
keyword[except] identifier[Exception] :
identifier[isect_idcs] =[]
keyword[if] identifier[len] ( identifier[isect_idcs] )!= literal[int] :
identifier[res] [ identifier[idx] , identifier[dirno] ]= identifier[np] . identifier[nan]
keyword[continue]
identifier[isects_vis] = identifier[isects_vis] [:, identifier[isect_idcs] ]. identifier[T]
identifier[len_vis] = identifier[npla] . identifier[norm] ( identifier[isects_vis] [ literal[int] ]- identifier[isects_vis] [ literal[int] ])
keyword[if] identifier[np] . identifier[isclose] ( identifier[len_vis] , literal[int] ): identifier[res] [ identifier[idx] , identifier[dirno] ]= identifier[np] . identifier[inf]
keyword[else] :
identifier[fsegs_srf] = identifier[segs_srf_t] [ identifier[isect_idcs] ]
identifier[fsegs_vis] = identifier[segs_vis_t] [ identifier[isect_idcs] ]
identifier[s02lens_vis] = identifier[npla] . identifier[norm] ( identifier[fsegs_vis] [:, literal[int] ]- identifier[fsegs_vis] [:, literal[int] ], identifier[axis] = literal[int] )
identifier[s01lens_vis] = identifier[npla] . identifier[norm] ( identifier[fsegs_vis] [:, literal[int] ]- identifier[isects_vis] , identifier[axis] = literal[int] )
identifier[vecs_srf] = identifier[fsegs_srf] [:, literal[int] ]- identifier[fsegs_srf] [:, literal[int] ]
identifier[s02lens_srf] = identifier[npla] . identifier[norm] ( identifier[vecs_srf] , identifier[axis] = literal[int] )
identifier[isects_srf] = identifier[np] . identifier[transpose] ([( identifier[s01lens_vis] / identifier[s02lens_vis] )])* identifier[vecs_srf] + identifier[fsegs_srf] [:, literal[int] ]
identifier[len_srf] = identifier[np] . identifier[sum] ( identifier[npla] . identifier[norm] ( identifier[isects_srf] - identifier[x0_srf] , identifier[axis] = literal[int] ))
identifier[res] [ identifier[idx] , identifier[dirno] ]= identifier[len_srf] / identifier[len_vis]
keyword[return] identifier[res] | def neighborhood_cortical_magnification(mesh, coordinates):
"""
neighborhood_cortical_magnification(mesh, visual_coordinates) yields a list of neighborhood-
based cortical magnification values for the vertices in the given mesh if their visual field
coordinates are given by the visual_coordinates matrix (must be like [x_values, y_values]). If
either x-value or y-value of a coordinate is either None or numpy.nan, then that cortical
magnification value is numpy.nan.
"""
idcs = _cmag_coord_idcs(coordinates)
neis = mesh.tess.indexed_neighborhoods
coords_vis = np.asarray(coordinates if len(coordinates) == 2 else coordinates.T)
coords_srf = mesh.coordinates
res = np.full((mesh.vertex_count, 3), np.nan, dtype=np.float)
res = np.array([row for row in [(np.nan, np.nan, np.nan)] for _ in range(mesh.vertex_count)], dtype=np.float)
for idx in idcs:
nei = neis[idx]
pts_vis = coords_vis[:, nei]
pts_srf = coords_srf[:, nei]
x0_vis = coords_vis[:, idx]
x0_srf = coords_srf[:, idx]
if any((u is None or np.isnan(u) for pt in pts_vis for u in pt)):
continue # depends on [control=['if'], data=[]]
# find tangential, radial, and areal magnifications
x0col_vis = np.asarray([x0_vis]).T
x0col_srf = np.asarray([x0_srf]).T
# areal is easy
voronoi_vis = (pts_vis - x0col_vis) * 0.5 + x0col_vis
voronoi_srf = (pts_srf - x0col_srf) * 0.5 + x0col_srf
area_vis = np.sum([geo.triangle_area(x0_vis, a, b) for (a, b) in zip(voronoi_vis.T, np.roll(voronoi_vis, 1, axis=1).T)])
area_srf = np.sum([geo.triangle_area(x0_srf, a, b) for (a, b) in zip(voronoi_srf.T, np.roll(voronoi_srf, 1, axis=1).T)])
res[idx, 2] = np.inf if np.isclose(area_vis, 0) else area_srf / area_vis
# radial and tangentual we do together because they are very similar:
# find the intersection lines then add up their distances along the cortex
pts_vis = voronoi_vis
pts_srf = voronoi_srf
segs_srf = (pts_srf, np.roll(pts_srf, -1, axis=1))
segs_vis = (pts_vis, np.roll(pts_vis, -1, axis=1))
segs_vis_t = np.transpose(segs_vis, (2, 0, 1))
segs_srf_t = np.transpose(segs_srf, (2, 0, 1))
x0norm_vis = npla.norm(x0_vis)
if not np.isclose(x0norm_vis, 0):
dirvecs = x0_vis / x0norm_vis
dirvecs = np.asarray([dirvecs, [-dirvecs[1], dirvecs[0]]])
for dirno in [0, 1]:
dirvec = dirvecs[dirno]
line = (x0_vis, x0_vis + dirvec)
try:
isects_vis = np.asarray(geo.line_segment_intersection_2D(line, segs_vis))
# okay, these will all be nan but two of them; they are the points we care about
isect_idcs = np.unique(np.where(np.logical_not(np.isnan(isects_vis)))[1]) # depends on [control=['try'], data=[]]
except Exception:
isect_idcs = [] # depends on [control=['except'], data=[]]
if len(isect_idcs) != 2:
res[idx, dirno] = np.nan
continue # depends on [control=['if'], data=[]]
isects_vis = isects_vis[:, isect_idcs].T
# we need the distance in visual space
len_vis = npla.norm(isects_vis[0] - isects_vis[1])
if np.isclose(len_vis, 0):
res[idx, dirno] = np.inf # depends on [control=['if'], data=[]]
else:
# we also need the distances on the surface: find the points by projection
fsegs_srf = segs_srf_t[isect_idcs]
fsegs_vis = segs_vis_t[isect_idcs]
s02lens_vis = npla.norm(fsegs_vis[:, 0] - fsegs_vis[:, 1], axis=1)
s01lens_vis = npla.norm(fsegs_vis[:, 0] - isects_vis, axis=1)
vecs_srf = fsegs_srf[:, 1] - fsegs_srf[:, 0]
s02lens_srf = npla.norm(vecs_srf, axis=1)
isects_srf = np.transpose([s01lens_vis / s02lens_vis]) * vecs_srf + fsegs_srf[:, 0]
len_srf = np.sum(npla.norm(isects_srf - x0_srf, axis=1))
res[idx, dirno] = len_srf / len_vis # depends on [control=['for'], data=['dirno']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
# That's it!
return res |
def date2timestamp(dte):
    '''Convert a *dte* into a valid unix timestamp.'''
    # Seconds since the epoch in local time, as computed by time.mktime.
    epoch_seconds = mktime(dte.timetuple())
    if not isinstance(dte, datetime):
        # Plain dates carry no sub-second information: return whole seconds.
        return int(epoch_seconds)
    # Preserve sub-second precision for datetime instances.
    return epoch_seconds + dte.microsecond / 1000000.0
constant[Convert a *dte* into a valid unix timestamp.]
variable[seconds] assign[=] call[name[mktime], parameter[call[name[dte].timetuple, parameter[]]]]
if call[name[isinstance], parameter[name[dte], name[datetime]]] begin[:]
return[binary_operation[name[seconds] + binary_operation[name[dte].microsecond / constant[1000000.0]]]] | keyword[def] identifier[date2timestamp] ( identifier[dte] ):
literal[string]
identifier[seconds] = identifier[mktime] ( identifier[dte] . identifier[timetuple] ())
keyword[if] identifier[isinstance] ( identifier[dte] , identifier[datetime] ):
keyword[return] identifier[seconds] + identifier[dte] . identifier[microsecond] / literal[int]
keyword[else] :
keyword[return] identifier[int] ( identifier[seconds] ) | def date2timestamp(dte):
"""Convert a *dte* into a valid unix timestamp."""
seconds = mktime(dte.timetuple())
if isinstance(dte, datetime):
return seconds + dte.microsecond / 1000000.0 # depends on [control=['if'], data=[]]
else:
return int(seconds) |
def readkmz(self, filename):
    '''reads in a kml or kmz file and returns the Placemark xml nodes'''
    # Strip quotation marks if neccessary.  str.strip returns a NEW
    # string, so the result must be assigned back -- the previous code
    # discarded it, leaving quoted paths unopenable.
    filename = filename.strip('"')
    # Load the raw KML text, unpacking it from the zip archive if the
    # caller handed us a .kmz file.  Context managers guarantee the file
    # handle / archive are closed even on error.
    if filename[-4:] == '.kml':
        with open(filename, "r") as fo:
            fstring = fo.read()
    elif filename[-4:] == '.kmz':
        with ZipFile(filename) as archive:
            for entry in archive.filelist:
                if entry.filename[-4:] == '.kml':
                    fstring = archive.read(entry)
                    break
            else:
                raise Exception("Could not find kml file in %s" % filename)
    else:
        raise Exception("Is not a valid kml or kmz file in %s" % filename)
    # send into the xml parser
    kmlstring = parseString(fstring)
    # get all the placenames
    nodes = kmlstring.getElementsByTagName('Placemark')
    return nodes
constant[reads in a kmz file and returns xml nodes]
call[name[filename].strip, parameter[constant["]]]
if compare[call[name[filename]][<ast.Slice object at 0x7da2041dac50>] equal[==] constant[.kml]] begin[:]
variable[fo] assign[=] call[name[open], parameter[name[filename], constant[r]]]
variable[fstring] assign[=] call[name[fo].read, parameter[]]
call[name[fo].close, parameter[]]
variable[kmlstring] assign[=] call[name[parseString], parameter[name[fstring]]]
variable[nodes] assign[=] call[name[kmlstring].getElementsByTagName, parameter[constant[Placemark]]]
return[name[nodes]] | keyword[def] identifier[readkmz] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[filename] . identifier[strip] ( literal[string] )
keyword[if] identifier[filename] [- literal[int] :]== literal[string] :
identifier[fo] = identifier[open] ( identifier[filename] , literal[string] )
identifier[fstring] = identifier[fo] . identifier[read] ()
identifier[fo] . identifier[close] ()
keyword[elif] identifier[filename] [- literal[int] :]== literal[string] :
identifier[zip] = identifier[ZipFile] ( identifier[filename] )
keyword[for] identifier[z] keyword[in] identifier[zip] . identifier[filelist] :
keyword[if] identifier[z] . identifier[filename] [- literal[int] :]== literal[string] :
identifier[fstring] = identifier[zip] . identifier[read] ( identifier[z] )
keyword[break]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[filename] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[filename] )
identifier[kmlstring] = identifier[parseString] ( identifier[fstring] )
identifier[nodes] = identifier[kmlstring] . identifier[getElementsByTagName] ( literal[string] )
keyword[return] identifier[nodes] | def readkmz(self, filename):
"""reads in a kmz file and returns xml nodes"""
#Strip quotation marks if neccessary
filename.strip('"') #Open the zip file (as applicable)
if filename[-4:] == '.kml':
fo = open(filename, 'r')
fstring = fo.read()
fo.close() # depends on [control=['if'], data=[]]
elif filename[-4:] == '.kmz':
zip = ZipFile(filename)
for z in zip.filelist:
if z.filename[-4:] == '.kml':
fstring = zip.read(z)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['z']]
else:
raise Exception('Could not find kml file in %s' % filename) # depends on [control=['if'], data=[]]
else:
raise Exception('Is not a valid kml or kmz file in %s' % filename)
#send into the xml parser
kmlstring = parseString(fstring)
#get all the placenames
nodes = kmlstring.getElementsByTagName('Placemark')
return nodes |
def poisson_errors(self):
    """
    Return a TGraphAsymmErrors representation of this histogram where the
    point y errors are Poisson.

    Overflow/underflow bins are excluded, and bins with zero or negative
    effective entries are skipped entirely, so the returned graph may hold
    fewer points than the histogram has bins.
    """
    graph = Graph(self.nbins(axis=0), type='asymm')
    # Carry the basic draw style over from the histogram.
    graph.SetLineWidth(self.GetLineWidth())
    graph.SetMarkerSize(self.GetMarkerSize())
    # Hoist the quantile function lookup out of the loop.
    chisqr = ROOT.TMath.ChisquareQuantile
    npoints = 0
    for bin in self.bins(overflow=False):
        entries = bin.effective_entries
        if entries <= 0:
            # Empty bins contribute no graph point.
            continue
        # Central Poisson interval via the chi-square quantile
        # construction (Garwood-style interval).  0.1586555 is
        # approximately (1 - 0.6827) / 2, i.e. the one-sided tail
        # probability of a 68.27% (1 sigma) central interval.
        ey_low = entries - 0.5 * chisqr(0.1586555, 2. * entries)
        ey_high = 0.5 * chisqr(
            1. - 0.1586555, 2. * (entries + 1)) - entries
        # Symmetric x error: half the bin width.
        ex = bin.x.width / 2.
        graph.SetPoint(npoints, bin.x.center, bin.value)
        graph.SetPointEXlow(npoints, ex)
        graph.SetPointEXhigh(npoints, ex)
        graph.SetPointEYlow(npoints, ey_low)
        graph.SetPointEYhigh(npoints, ey_high)
        npoints += 1
    # Shrink the graph to the number of points actually filled, since
    # skipped bins leave it over-allocated.
    graph.Set(npoints)
    return graph
constant[
Return a TGraphAsymmErrors representation of this histogram where the
point y errors are Poisson.
]
variable[graph] assign[=] call[name[Graph], parameter[call[name[self].nbins, parameter[]]]]
call[name[graph].SetLineWidth, parameter[call[name[self].GetLineWidth, parameter[]]]]
call[name[graph].SetMarkerSize, parameter[call[name[self].GetMarkerSize, parameter[]]]]
variable[chisqr] assign[=] name[ROOT].TMath.ChisquareQuantile
variable[npoints] assign[=] constant[0]
for taget[name[bin]] in starred[call[name[self].bins, parameter[]]] begin[:]
variable[entries] assign[=] name[bin].effective_entries
if compare[name[entries] less_or_equal[<=] constant[0]] begin[:]
continue
variable[ey_low] assign[=] binary_operation[name[entries] - binary_operation[constant[0.5] * call[name[chisqr], parameter[constant[0.1586555], binary_operation[constant[2.0] * name[entries]]]]]]
variable[ey_high] assign[=] binary_operation[binary_operation[constant[0.5] * call[name[chisqr], parameter[binary_operation[constant[1.0] - constant[0.1586555]], binary_operation[constant[2.0] * binary_operation[name[entries] + constant[1]]]]]] - name[entries]]
variable[ex] assign[=] binary_operation[name[bin].x.width / constant[2.0]]
call[name[graph].SetPoint, parameter[name[npoints], name[bin].x.center, name[bin].value]]
call[name[graph].SetPointEXlow, parameter[name[npoints], name[ex]]]
call[name[graph].SetPointEXhigh, parameter[name[npoints], name[ex]]]
call[name[graph].SetPointEYlow, parameter[name[npoints], name[ey_low]]]
call[name[graph].SetPointEYhigh, parameter[name[npoints], name[ey_high]]]
<ast.AugAssign object at 0x7da1b11f2bf0>
call[name[graph].Set, parameter[name[npoints]]]
return[name[graph]] | keyword[def] identifier[poisson_errors] ( identifier[self] ):
literal[string]
identifier[graph] = identifier[Graph] ( identifier[self] . identifier[nbins] ( identifier[axis] = literal[int] ), identifier[type] = literal[string] )
identifier[graph] . identifier[SetLineWidth] ( identifier[self] . identifier[GetLineWidth] ())
identifier[graph] . identifier[SetMarkerSize] ( identifier[self] . identifier[GetMarkerSize] ())
identifier[chisqr] = identifier[ROOT] . identifier[TMath] . identifier[ChisquareQuantile]
identifier[npoints] = literal[int]
keyword[for] identifier[bin] keyword[in] identifier[self] . identifier[bins] ( identifier[overflow] = keyword[False] ):
identifier[entries] = identifier[bin] . identifier[effective_entries]
keyword[if] identifier[entries] <= literal[int] :
keyword[continue]
identifier[ey_low] = identifier[entries] - literal[int] * identifier[chisqr] ( literal[int] , literal[int] * identifier[entries] )
identifier[ey_high] = literal[int] * identifier[chisqr] (
literal[int] - literal[int] , literal[int] *( identifier[entries] + literal[int] ))- identifier[entries]
identifier[ex] = identifier[bin] . identifier[x] . identifier[width] / literal[int]
identifier[graph] . identifier[SetPoint] ( identifier[npoints] , identifier[bin] . identifier[x] . identifier[center] , identifier[bin] . identifier[value] )
identifier[graph] . identifier[SetPointEXlow] ( identifier[npoints] , identifier[ex] )
identifier[graph] . identifier[SetPointEXhigh] ( identifier[npoints] , identifier[ex] )
identifier[graph] . identifier[SetPointEYlow] ( identifier[npoints] , identifier[ey_low] )
identifier[graph] . identifier[SetPointEYhigh] ( identifier[npoints] , identifier[ey_high] )
identifier[npoints] += literal[int]
identifier[graph] . identifier[Set] ( identifier[npoints] )
keyword[return] identifier[graph] | def poisson_errors(self):
"""
Return a TGraphAsymmErrors representation of this histogram where the
point y errors are Poisson.
"""
graph = Graph(self.nbins(axis=0), type='asymm')
graph.SetLineWidth(self.GetLineWidth())
graph.SetMarkerSize(self.GetMarkerSize())
chisqr = ROOT.TMath.ChisquareQuantile
npoints = 0
for bin in self.bins(overflow=False):
entries = bin.effective_entries
if entries <= 0:
continue # depends on [control=['if'], data=[]]
ey_low = entries - 0.5 * chisqr(0.1586555, 2.0 * entries)
ey_high = 0.5 * chisqr(1.0 - 0.1586555, 2.0 * (entries + 1)) - entries
ex = bin.x.width / 2.0
graph.SetPoint(npoints, bin.x.center, bin.value)
graph.SetPointEXlow(npoints, ex)
graph.SetPointEXhigh(npoints, ex)
graph.SetPointEYlow(npoints, ey_low)
graph.SetPointEYhigh(npoints, ey_high)
npoints += 1 # depends on [control=['for'], data=['bin']]
graph.Set(npoints)
return graph |
def image_predict(self, X):
    """
    Predicts class label for the entire image.

    :param X: Array of images to be classified.
    :type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands]
    :return: raster classification map
    :rtype: numpy array, [n_samples, n_pixels_y, n_pixels_x]
    """
    # Flatten the image stack to one row per pixel, classify the rows,
    # then fold the flat predictions back into the raster layout.
    flat_pixels = self.extract_pixels(X)
    flat_predictions = self.classifier.predict(flat_pixels)
    n_images, height, width = X.shape[0], X.shape[1], X.shape[2]
    return flat_predictions.reshape(n_images, height, width)
constant[
Predicts class label for the entire image.
:param X: Array of images to be classified.
:type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands]
:return: raster classification map
:rtype: numpy array, [n_samples, n_pixels_y, n_pixels_x]
]
variable[pixels] assign[=] call[name[self].extract_pixels, parameter[name[X]]]
variable[predictions] assign[=] call[name[self].classifier.predict, parameter[name[pixels]]]
return[call[name[predictions].reshape, parameter[call[name[X].shape][constant[0]], call[name[X].shape][constant[1]], call[name[X].shape][constant[2]]]]] | keyword[def] identifier[image_predict] ( identifier[self] , identifier[X] ):
literal[string]
identifier[pixels] = identifier[self] . identifier[extract_pixels] ( identifier[X] )
identifier[predictions] = identifier[self] . identifier[classifier] . identifier[predict] ( identifier[pixels] )
keyword[return] identifier[predictions] . identifier[reshape] ( identifier[X] . identifier[shape] [ literal[int] ], identifier[X] . identifier[shape] [ literal[int] ], identifier[X] . identifier[shape] [ literal[int] ]) | def image_predict(self, X):
"""
Predicts class label for the entire image.
:param X: Array of images to be classified.
:type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands]
:return: raster classification map
:rtype: numpy array, [n_samples, n_pixels_y, n_pixels_x]
"""
pixels = self.extract_pixels(X)
predictions = self.classifier.predict(pixels)
return predictions.reshape(X.shape[0], X.shape[1], X.shape[2]) |
def convert_to_string(ndarr):
    """
    Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and
    returns it.
    """
    buffer = BytesIO()
    try:
        _internal_write(buffer, ndarr)
        return buffer.getvalue()
    finally:
        # Equivalent to contextlib.closing: the buffer is always closed,
        # even if the writer raises.
        buffer.close()
constant[
Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and
returns it.
]
with call[name[contextlib].closing, parameter[call[name[BytesIO], parameter[]]]] begin[:]
call[name[_internal_write], parameter[name[bytesio], name[ndarr]]]
return[call[name[bytesio].getvalue, parameter[]]] | keyword[def] identifier[convert_to_string] ( identifier[ndarr] ):
literal[string]
keyword[with] identifier[contextlib] . identifier[closing] ( identifier[BytesIO] ()) keyword[as] identifier[bytesio] :
identifier[_internal_write] ( identifier[bytesio] , identifier[ndarr] )
keyword[return] identifier[bytesio] . identifier[getvalue] () | def convert_to_string(ndarr):
"""
Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and
returns it.
"""
with contextlib.closing(BytesIO()) as bytesio:
_internal_write(bytesio, ndarr)
return bytesio.getvalue() # depends on [control=['with'], data=['bytesio']] |
def parse_meta(file_content, cable):
    """\
    Extracts the reference id, date/time of creation, the classification,
    and the origin of the cable and assigns the value to the provided `cable`.

    The metadata is read from the last <table class='cable'> .. </table>
    block in `file_content`.  Raises `ValueError` if the table is missing,
    does not yield exactly four fields, or names a reference id that cannot
    be reconciled with `cable.reference_id`.
    """
    # Search window: the LAST cable table in the document (rindex scans
    # from the end of the string).
    end_idx = file_content.rindex("</table>")
    start_idx = file_content.rindex("<table class='cable'>", 0, end_idx)
    m = _META_PATTERN.search(file_content, start_idx, end_idx)
    if not m:
        raise ValueError('Cable table not found')
    if len(m.groups()) != 4:
        raise ValueError('Unexpected metadata result: "%r"' % m.groups())
    # Table content:
    # Reference ID | Created | Classification | Origin
    ref, created, classification, origin = m.groups()
    if cable.reference_id != ref:
        # The scraped id disagrees with the expected one; consult the
        # fix-up tables before giving up.
        # NOTE(review): assumes MALFORMED_CABLE_IDS / INVALID_CABLE_IDS are
        # keyed by the raw scraped id and map to the canonical reference
        # id -- confirm at their definitions.
        reference_id = MALFORMED_CABLE_IDS.get(ref)
        if reference_id != cable.reference_id:
            reference_id = INVALID_CABLE_IDS.get(ref)
            if reference_id != cable.reference_id:
                raise ValueError('cable.reference_id != ref. reference_id="%s", ref="%s"' % (cable.reference_id, ref))
    cable.created = created
    cable.origin = origin
    # classifications are usually written in upper case, but you never know..
    cable.classification = classification.upper()
    # Try to find media IRIs; only reached-for within the same table window.
    start_idx = file_content.rfind(u'Appears in these', start_idx, end_idx)
    if start_idx > 0:
        # media_uris is only assigned when the marker text is present.
        cable.media_uris = _MEDIA_URLS_PATTERN.findall(file_content, start_idx, end_idx)
    return cable
constant[ Extracts the reference id, date/time of creation, the classification,
and the origin of the cable and assigns the value to the provided `cable`.
]
variable[end_idx] assign[=] call[name[file_content].rindex, parameter[constant[</table>]]]
variable[start_idx] assign[=] call[name[file_content].rindex, parameter[constant[<table class='cable'>], constant[0], name[end_idx]]]
variable[m] assign[=] call[name[_META_PATTERN].search, parameter[name[file_content], name[start_idx], name[end_idx]]]
if <ast.UnaryOp object at 0x7da20c7c9f30> begin[:]
<ast.Raise object at 0x7da20c7c85b0>
if compare[call[name[len], parameter[call[name[m].groups, parameter[]]]] not_equal[!=] constant[4]] begin[:]
<ast.Raise object at 0x7da20c6e6290>
<ast.Tuple object at 0x7da20c6e7ee0> assign[=] call[name[m].groups, parameter[]]
if compare[name[cable].reference_id not_equal[!=] name[ref]] begin[:]
variable[reference_id] assign[=] call[name[MALFORMED_CABLE_IDS].get, parameter[name[ref]]]
if compare[name[reference_id] not_equal[!=] name[cable].reference_id] begin[:]
variable[reference_id] assign[=] call[name[INVALID_CABLE_IDS].get, parameter[name[ref]]]
if compare[name[reference_id] not_equal[!=] name[cable].reference_id] begin[:]
<ast.Raise object at 0x7da20c6e4b80>
name[cable].created assign[=] name[created]
name[cable].origin assign[=] name[origin]
name[cable].classification assign[=] call[name[classification].upper, parameter[]]
variable[start_idx] assign[=] call[name[file_content].rfind, parameter[constant[Appears in these], name[start_idx], name[end_idx]]]
if compare[name[start_idx] greater[>] constant[0]] begin[:]
name[cable].media_uris assign[=] call[name[_MEDIA_URLS_PATTERN].findall, parameter[name[file_content], name[start_idx], name[end_idx]]]
return[name[cable]] | keyword[def] identifier[parse_meta] ( identifier[file_content] , identifier[cable] ):
literal[string]
identifier[end_idx] = identifier[file_content] . identifier[rindex] ( literal[string] )
identifier[start_idx] = identifier[file_content] . identifier[rindex] ( literal[string] , literal[int] , identifier[end_idx] )
identifier[m] = identifier[_META_PATTERN] . identifier[search] ( identifier[file_content] , identifier[start_idx] , identifier[end_idx] )
keyword[if] keyword[not] identifier[m] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[m] . identifier[groups] ())!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[m] . identifier[groups] ())
identifier[ref] , identifier[created] , identifier[classification] , identifier[origin] = identifier[m] . identifier[groups] ()
keyword[if] identifier[cable] . identifier[reference_id] != identifier[ref] :
identifier[reference_id] = identifier[MALFORMED_CABLE_IDS] . identifier[get] ( identifier[ref] )
keyword[if] identifier[reference_id] != identifier[cable] . identifier[reference_id] :
identifier[reference_id] = identifier[INVALID_CABLE_IDS] . identifier[get] ( identifier[ref] )
keyword[if] identifier[reference_id] != identifier[cable] . identifier[reference_id] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[cable] . identifier[reference_id] , identifier[ref] ))
identifier[cable] . identifier[created] = identifier[created]
identifier[cable] . identifier[origin] = identifier[origin]
identifier[cable] . identifier[classification] = identifier[classification] . identifier[upper] ()
identifier[start_idx] = identifier[file_content] . identifier[rfind] ( literal[string] , identifier[start_idx] , identifier[end_idx] )
keyword[if] identifier[start_idx] > literal[int] :
identifier[cable] . identifier[media_uris] = identifier[_MEDIA_URLS_PATTERN] . identifier[findall] ( identifier[file_content] , identifier[start_idx] , identifier[end_idx] )
keyword[return] identifier[cable] | def parse_meta(file_content, cable):
""" Extracts the reference id, date/time of creation, the classification,
and the origin of the cable and assigns the value to the provided `cable`.
"""
end_idx = file_content.rindex('</table>')
start_idx = file_content.rindex("<table class='cable'>", 0, end_idx)
m = _META_PATTERN.search(file_content, start_idx, end_idx)
if not m:
raise ValueError('Cable table not found') # depends on [control=['if'], data=[]]
if len(m.groups()) != 4:
raise ValueError('Unexpected metadata result: "%r"' % m.groups()) # depends on [control=['if'], data=[]] # Table content:
# Reference ID | Created | Classification | Origin
(ref, created, classification, origin) = m.groups()
if cable.reference_id != ref:
reference_id = MALFORMED_CABLE_IDS.get(ref)
if reference_id != cable.reference_id:
reference_id = INVALID_CABLE_IDS.get(ref)
if reference_id != cable.reference_id:
raise ValueError('cable.reference_id != ref. reference_id="%s", ref="%s"' % (cable.reference_id, ref)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['reference_id']] # depends on [control=['if'], data=['ref']]
cable.created = created
cable.origin = origin # classifications are usually written in upper case, but you never know..
cable.classification = classification.upper()
# Try to find media IRIs
start_idx = file_content.rfind(u'Appears in these', start_idx, end_idx)
if start_idx > 0:
cable.media_uris = _MEDIA_URLS_PATTERN.findall(file_content, start_idx, end_idx) # depends on [control=['if'], data=['start_idx']]
return cable |
def getLinkedRequests(self):
    """Lookup linked Analysis Requests
    :returns: sorted list of ARs, where the latest AR comes first
    """
    ref_catalog = api.get_tool("reference_catalog")
    back_refs = ref_catalog.getBackReferences(self, "AnalysisRequestAttachment")
    # Resolve each back reference by UID; unknown UIDs come back as None.
    resolved = [api.get_object_by_uid(ref.sourceUID, None) for ref in back_refs]
    # Drop entries that could not be resolved.
    linked = [obj for obj in resolved if obj]
    # Sort by physical path, so that attachments coming from an AR with a
    # higher "-Rn" suffix get sorted correctly.
    # N.B. the created date is the same, hence we can not use it
    return sorted(linked, key=api.get_path, reverse=True)
constant[Lookup linked Analysis Requests
:returns: sorted list of ARs, where the latest AR comes first
]
variable[rc] assign[=] call[name[api].get_tool, parameter[constant[reference_catalog]]]
variable[refs] assign[=] call[name[rc].getBackReferences, parameter[name[self], constant[AnalysisRequestAttachment]]]
variable[ars] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b2344940>, name[refs]]]
variable[ars] assign[=] call[name[filter], parameter[constant[None], name[ars]]]
return[call[name[sorted], parameter[name[ars]]]] | keyword[def] identifier[getLinkedRequests] ( identifier[self] ):
literal[string]
identifier[rc] = identifier[api] . identifier[get_tool] ( literal[string] )
identifier[refs] = identifier[rc] . identifier[getBackReferences] ( identifier[self] , literal[string] )
identifier[ars] = identifier[map] ( keyword[lambda] identifier[ref] : identifier[api] . identifier[get_object_by_uid] ( identifier[ref] . identifier[sourceUID] , keyword[None] ), identifier[refs] )
identifier[ars] = identifier[filter] ( keyword[None] , identifier[ars] )
keyword[return] identifier[sorted] ( identifier[ars] , identifier[key] = identifier[api] . identifier[get_path] , identifier[reverse] = keyword[True] ) | def getLinkedRequests(self):
"""Lookup linked Analysis Requests
:returns: sorted list of ARs, where the latest AR comes first
"""
rc = api.get_tool('reference_catalog')
refs = rc.getBackReferences(self, 'AnalysisRequestAttachment')
# fetch the objects by UID and handle nonexisting UIDs gracefully
ars = map(lambda ref: api.get_object_by_uid(ref.sourceUID, None), refs)
# filter out None values (nonexisting UIDs)
ars = filter(None, ars)
# sort by physical path, so that attachments coming from an AR with a
# higher "-Rn" suffix get sorted correctly.
# N.B. the created date is the same, hence we can not use it
return sorted(ars, key=api.get_path, reverse=True) |
def _insertPoint(self, index, position, type="line",
                 smooth=False, name=None, identifier=None, **kwargs):
    """
    Back-end hook for point insertion; the base class provides no
    implementation.

    By the time this is called, the arguments are pre-validated:

    position will be a valid position (x, y).
    type will be a valid type.
    smooth will be a valid boolean.
    name will be a valid name or None.
    identifier will be a valid identifier or None.
    The identifier will not have been tested for uniqueness.

    Subclasses must override this method.
    """
    # Abstract stub: signal that the subclass has not supplied an
    # implementation.
    self.raiseNotImplementedError()
constant[
position will be a valid position (x, y).
type will be a valid type.
smooth will be a valid boolean.
name will be a valid name or None.
identifier will be a valid identifier or None.
The identifier will not have been tested for uniqueness.
Subclasses must override this method.
]
call[name[self].raiseNotImplementedError, parameter[]] | keyword[def] identifier[_insertPoint] ( identifier[self] , identifier[index] , identifier[position] , identifier[type] = literal[string] ,
identifier[smooth] = keyword[False] , identifier[name] = keyword[None] , identifier[identifier] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[raiseNotImplementedError] () | def _insertPoint(self, index, position, type='line', smooth=False, name=None, identifier=None, **kwargs):
"""
position will be a valid position (x, y).
type will be a valid type.
smooth will be a valid boolean.
name will be a valid name or None.
identifier will be a valid identifier or None.
The identifier will not have been tested for uniqueness.
Subclasses must override this method.
"""
self.raiseNotImplementedError() |
def CheckAddressState(self, script_hash):
    """
    Determine the address state of the provided script hash.

    Args:
        script_hash (UInt160): a script hash to determine the address state of.

    Returns:
        AddressState: ``AddressState.InWallet`` if the hash matches one of
        the wallet's contracts, ``InWallet | WatchOnly`` if it is a
        watch-only address, otherwise ``AddressState.NoState``.
    """
    # Hoist the byte conversion out of the loop -- it is loop-invariant.
    target = script_hash.ToBytes()
    # Only the contract values are needed, so skip the unused dict keys.
    for contract in self._contracts.values():
        if contract.ScriptHash.ToBytes() == target:
            return AddressState.InWallet
    for watch_script_hash in self._watch_only:
        if watch_script_hash == script_hash:
            return AddressState.InWallet | AddressState.WatchOnly
    return AddressState.NoState
constant[
Determine the address state of the provided script hash.
Args:
script_hash (UInt160): a script hash to determine the address state of.
Returns:
AddressState: the address state.
]
for taget[tuple[[<ast.Name object at 0x7da204621540>, <ast.Name object at 0x7da204623730>]]] in starred[call[name[self]._contracts.items, parameter[]]] begin[:]
if compare[call[name[contract].ScriptHash.ToBytes, parameter[]] equal[==] call[name[script_hash].ToBytes, parameter[]]] begin[:]
return[name[AddressState].InWallet]
for taget[name[watch]] in starred[name[self]._watch_only] begin[:]
if compare[name[watch] equal[==] name[script_hash]] begin[:]
return[binary_operation[name[AddressState].InWallet <ast.BitOr object at 0x7da2590d6aa0> name[AddressState].WatchOnly]]
return[name[AddressState].NoState] | keyword[def] identifier[CheckAddressState] ( identifier[self] , identifier[script_hash] ):
literal[string]
keyword[for] identifier[key] , identifier[contract] keyword[in] identifier[self] . identifier[_contracts] . identifier[items] ():
keyword[if] identifier[contract] . identifier[ScriptHash] . identifier[ToBytes] ()== identifier[script_hash] . identifier[ToBytes] ():
keyword[return] identifier[AddressState] . identifier[InWallet]
keyword[for] identifier[watch] keyword[in] identifier[self] . identifier[_watch_only] :
keyword[if] identifier[watch] == identifier[script_hash] :
keyword[return] identifier[AddressState] . identifier[InWallet] | identifier[AddressState] . identifier[WatchOnly]
keyword[return] identifier[AddressState] . identifier[NoState] | def CheckAddressState(self, script_hash):
"""
Determine the address state of the provided script hash.
Args:
script_hash (UInt160): a script hash to determine the address state of.
Returns:
AddressState: the address state.
"""
for (key, contract) in self._contracts.items():
if contract.ScriptHash.ToBytes() == script_hash.ToBytes():
return AddressState.InWallet # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for watch in self._watch_only:
if watch == script_hash:
return AddressState.InWallet | AddressState.WatchOnly # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['watch']]
return AddressState.NoState |
def get_assets_by_search(self, asset_query, asset_search):
    """Pass through to provider AssetSearchSession.get_assets_by_search"""
    # Implemented from azosid template for -
    # osid.resource.ResourceSearchSession.get_resources_by_search_template
    if self._can('search'):
        return self._provider_session.get_assets_by_search(asset_query,
                                                           asset_search)
    # Caller lacks the 'search' permission.
    raise PermissionDenied()
constant[Pass through to provider AssetSearchSession.get_assets_by_search]
if <ast.UnaryOp object at 0x7da204620940> begin[:]
<ast.Raise object at 0x7da2054a5990>
return[call[name[self]._provider_session.get_assets_by_search, parameter[name[asset_query], name[asset_search]]]] | keyword[def] identifier[get_assets_by_search] ( identifier[self] , identifier[asset_query] , identifier[asset_search] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_can] ( literal[string] ):
keyword[raise] identifier[PermissionDenied] ()
keyword[return] identifier[self] . identifier[_provider_session] . identifier[get_assets_by_search] ( identifier[asset_query] , identifier[asset_search] ) | def get_assets_by_search(self, asset_query, asset_search):
"""Pass through to provider AssetSearchSession.get_assets_by_search"""
# Implemented from azosid template for -
# osid.resource.ResourceSearchSession.get_resources_by_search_template
if not self._can('search'):
raise PermissionDenied() # depends on [control=['if'], data=[]]
return self._provider_session.get_assets_by_search(asset_query, asset_search) |
def to_date(dt, tzinfo=None, format=None):
    """
    Convert a datetime to date with tzinfo
    """
    converted = to_datetime(dt, tzinfo, format)
    if converted:
        return date(converted.year, converted.month, converted.day)
    # Pass the falsy conversion result (e.g. None) straight through.
    return converted
constant[
Convert a datetime to date with tzinfo
]
variable[d] assign[=] call[name[to_datetime], parameter[name[dt], name[tzinfo], name[format]]]
if <ast.UnaryOp object at 0x7da2044c0790> begin[:]
return[name[d]]
return[call[name[date], parameter[name[d].year, name[d].month, name[d].day]]] | keyword[def] identifier[to_date] ( identifier[dt] , identifier[tzinfo] = keyword[None] , identifier[format] = keyword[None] ):
literal[string]
identifier[d] = identifier[to_datetime] ( identifier[dt] , identifier[tzinfo] , identifier[format] )
keyword[if] keyword[not] identifier[d] :
keyword[return] identifier[d]
keyword[return] identifier[date] ( identifier[d] . identifier[year] , identifier[d] . identifier[month] , identifier[d] . identifier[day] ) | def to_date(dt, tzinfo=None, format=None):
"""
Convert a datetime to date with tzinfo
"""
d = to_datetime(dt, tzinfo, format)
if not d:
return d # depends on [control=['if'], data=[]]
return date(d.year, d.month, d.day) |
def make_error_response(self, cond):
    """Build an error response for this (non-error) message stanza.

    :Parameters:
        - `cond`: error condition name, as defined in XMPP specification.

    :return: new message stanza with the same "id" as self, "from" and
        "to" attributes swapped, type="error" and containing <error />
        element plus payload of `self`.
    :returntype: `Message`"""
    # Per XMPP, an error stanza must never be answered with another error.
    if self.stanza_type == "error":
        raise ValueError("Errors may not be generated in response to errors")
    response = Message(
        stanza_type="error",
        from_jid=self.to_jid,
        to_jid=self.from_jid,
        stanza_id=self.stanza_id,
        error_cond=cond,
        subject=self._subject,
        body=self._body,
        thread=self._thread,
    )
    # Make sure the payload is decoded before copying it over.
    if self._payload is None:
        self.decode_payload()
    for item in self._payload:
        response.add_payload(item.copy())
    return response
return msg | def function[make_error_response, parameter[self, cond]]:
constant[Create error response for any non-error message stanza.
:Parameters:
- `cond`: error condition name, as defined in XMPP specification.
:return: new message stanza with the same "id" as self, "from" and
"to" attributes swapped, type="error" and containing <error />
element plus payload of `self`.
:returntype: `Message`]
if compare[name[self].stanza_type equal[==] constant[error]] begin[:]
<ast.Raise object at 0x7da18eb54ee0>
variable[msg] assign[=] call[name[Message], parameter[]]
if compare[name[self]._payload is constant[None]] begin[:]
call[name[self].decode_payload, parameter[]]
for taget[name[payload]] in starred[name[self]._payload] begin[:]
call[name[msg].add_payload, parameter[call[name[payload].copy, parameter[]]]]
return[name[msg]] | keyword[def] identifier[make_error_response] ( identifier[self] , identifier[cond] ):
literal[string]
keyword[if] identifier[self] . identifier[stanza_type] == literal[string] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[msg] = identifier[Message] ( identifier[stanza_type] = literal[string] , identifier[from_jid] = identifier[self] . identifier[to_jid] ,
identifier[to_jid] = identifier[self] . identifier[from_jid] , identifier[stanza_id] = identifier[self] . identifier[stanza_id] ,
identifier[error_cond] = identifier[cond] ,
identifier[subject] = identifier[self] . identifier[_subject] , identifier[body] = identifier[self] . identifier[_body] ,
identifier[thread] = identifier[self] . identifier[_thread] )
keyword[if] identifier[self] . identifier[_payload] keyword[is] keyword[None] :
identifier[self] . identifier[decode_payload] ()
keyword[for] identifier[payload] keyword[in] identifier[self] . identifier[_payload] :
identifier[msg] . identifier[add_payload] ( identifier[payload] . identifier[copy] ())
keyword[return] identifier[msg] | def make_error_response(self, cond):
"""Create error response for any non-error message stanza.
:Parameters:
- `cond`: error condition name, as defined in XMPP specification.
:return: new message stanza with the same "id" as self, "from" and
"to" attributes swapped, type="error" and containing <error />
element plus payload of `self`.
:returntype: `Message`"""
if self.stanza_type == 'error':
raise ValueError('Errors may not be generated in response to errors') # depends on [control=['if'], data=[]]
msg = Message(stanza_type='error', from_jid=self.to_jid, to_jid=self.from_jid, stanza_id=self.stanza_id, error_cond=cond, subject=self._subject, body=self._body, thread=self._thread)
if self._payload is None:
self.decode_payload() # depends on [control=['if'], data=[]]
for payload in self._payload:
msg.add_payload(payload.copy()) # depends on [control=['for'], data=['payload']]
return msg |
def cprint(text="", color=None, on_color=None, attrs=None, **kwargs):
    """Print ``text`` colorized and word-wrapped to the terminal's width.

    All arguments other than ``text`` are forwarded unchanged to
    ``termcolor.cprint`` (e.g. ``color="red"``, ``attrs=["bold"]``).
    """
    # shutil.get_terminal_size falls back to 80x24 when it cannot query the
    # terminal, but when output is piped it can still report 0 columns, so
    # guard against that explicitly. Only the column count is needed.
    columns = shutil.get_terminal_size().columns
    if columns == 0:
        columns = 80
    # Wrap first, then colorize; drop_whitespace=False keeps the caller's
    # spacing intact across wrapped lines.
    termcolor.cprint(textwrap.fill(text, columns, drop_whitespace=False),
                     color=color, on_color=on_color, attrs=attrs, **kwargs)
constant[Colorizes text (and wraps to terminal's width).]
<ast.Tuple object at 0x7da20e9b2c20> assign[=] call[name[shutil].get_terminal_size, parameter[]]
if compare[name[columns] equal[==] constant[0]] begin[:]
variable[columns] assign[=] constant[80]
call[name[termcolor].cprint, parameter[call[name[textwrap].fill, parameter[name[text], name[columns]]]]] | keyword[def] identifier[cprint] ( identifier[text] = literal[string] , identifier[color] = keyword[None] , identifier[on_color] = keyword[None] , identifier[attrs] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[columns] , identifier[lines] = identifier[shutil] . identifier[get_terminal_size] ()
keyword[if] identifier[columns] == literal[int] :
identifier[columns] = literal[int]
identifier[termcolor] . identifier[cprint] ( identifier[textwrap] . identifier[fill] ( identifier[text] , identifier[columns] , identifier[drop_whitespace] = keyword[False] ),
identifier[color] = identifier[color] , identifier[on_color] = identifier[on_color] , identifier[attrs] = identifier[attrs] ,** identifier[kwargs] ) | def cprint(text='', color=None, on_color=None, attrs=None, **kwargs):
"""Colorizes text (and wraps to terminal's width)."""
# Assume 80 in case not running in a terminal
(columns, lines) = shutil.get_terminal_size()
if columns == 0:
columns = 80 # Because get_terminal_size's default fallback doesn't work in pipes # depends on [control=['if'], data=['columns']]
# Print text
termcolor.cprint(textwrap.fill(text, columns, drop_whitespace=False), color=color, on_color=on_color, attrs=attrs, **kwargs) |
def screen_cv2(self):
    """Current window screen as an OpenCV-style (BGR) numpy array."""
    rgb_image = self.screen.convert('RGB')
    frame = np.array(rgb_image)
    rgb_image.close()
    # OpenCV expects BGR channel order, so reverse the last axis.
    return frame[:, :, ::-1]
constant[cv2 Image of current window screen]
variable[pil_image] assign[=] call[name[self].screen.convert, parameter[constant[RGB]]]
variable[cv2_image] assign[=] call[name[np].array, parameter[name[pil_image]]]
call[name[pil_image].close, parameter[]]
variable[cv2_image] assign[=] call[name[cv2_image]][tuple[[<ast.Slice object at 0x7da20c993460>, <ast.Slice object at 0x7da20c992b90>, <ast.Slice object at 0x7da20c990f10>]]]
return[name[cv2_image]] | keyword[def] identifier[screen_cv2] ( identifier[self] ):
literal[string]
identifier[pil_image] = identifier[self] . identifier[screen] . identifier[convert] ( literal[string] )
identifier[cv2_image] = identifier[np] . identifier[array] ( identifier[pil_image] )
identifier[pil_image] . identifier[close] ()
identifier[cv2_image] = identifier[cv2_image] [:,:,::- literal[int] ]
keyword[return] identifier[cv2_image] | def screen_cv2(self):
"""cv2 Image of current window screen"""
pil_image = self.screen.convert('RGB')
cv2_image = np.array(pil_image)
pil_image.close() # Convert RGB to BGR
cv2_image = cv2_image[:, :, ::-1]
return cv2_image |
def _make_details_from_stat(self, stat_result):
    """Build a *details* info dictionary from an ``os.stat`` result."""
    details = {
        '_write': ['accessed', 'modified'],
        'accessed': stat_result.st_atime,
        'modified': stat_result.st_mtime,
        'size': stat_result.st_size,
        'type': int(OSFS._get_type_from_stat(stat_result)),
        'created': getattr(stat_result, 'st_birthtime', None),
    }
    # On Windows st_ctime is the creation time; on other platforms it is
    # the time of the last metadata change.
    if self.platform == "win32":
        details['created'] = getattr(stat_result, 'st_ctime', None)
    else:
        details['metadata_changed'] = getattr(stat_result, 'st_ctime', None)
    return details
constant[Make a *details* dictionnary from a stat result.
]
variable[details] assign[=] dictionary[[<ast.Constant object at 0x7da2043461a0>, <ast.Constant object at 0x7da2043471f0>, <ast.Constant object at 0x7da2043448e0>, <ast.Constant object at 0x7da204346dd0>, <ast.Constant object at 0x7da2043466b0>], [<ast.List object at 0x7da204346b90>, <ast.Attribute object at 0x7da204345720>, <ast.Attribute object at 0x7da204346e00>, <ast.Attribute object at 0x7da204345870>, <ast.Call object at 0x7da204345390>]]
call[name[details]][constant[created]] assign[=] call[name[getattr], parameter[name[stat_result], constant[st_birthtime], constant[None]]]
variable[ctime_key] assign[=] <ast.IfExp object at 0x7da18f00e890>
call[name[details]][name[ctime_key]] assign[=] call[name[getattr], parameter[name[stat_result], constant[st_ctime], constant[None]]]
return[name[details]] | keyword[def] identifier[_make_details_from_stat] ( identifier[self] , identifier[stat_result] ):
literal[string]
identifier[details] ={
literal[string] :[ literal[string] , literal[string] ],
literal[string] : identifier[stat_result] . identifier[st_atime] ,
literal[string] : identifier[stat_result] . identifier[st_mtime] ,
literal[string] : identifier[stat_result] . identifier[st_size] ,
literal[string] : identifier[int] ( identifier[OSFS] . identifier[_get_type_from_stat] ( identifier[stat_result] )),
}
identifier[details] [ literal[string] ]= identifier[getattr] ( identifier[stat_result] , literal[string] , keyword[None] )
identifier[ctime_key] = literal[string] keyword[if] identifier[self] . identifier[platform] == literal[string] keyword[else] literal[string]
identifier[details] [ identifier[ctime_key] ]= identifier[getattr] ( identifier[stat_result] , literal[string] , keyword[None] )
keyword[return] identifier[details] | def _make_details_from_stat(self, stat_result):
"""Make a *details* dictionnary from a stat result.
"""
details = {'_write': ['accessed', 'modified'], 'accessed': stat_result.st_atime, 'modified': stat_result.st_mtime, 'size': stat_result.st_size, 'type': int(OSFS._get_type_from_stat(stat_result))}
details['created'] = getattr(stat_result, 'st_birthtime', None)
ctime_key = 'created' if self.platform == 'win32' else 'metadata_changed'
details[ctime_key] = getattr(stat_result, 'st_ctime', None)
return details |
def handle_receive(
        request: RequestType,
        user: UserType = None,
        sender_key_fetcher: Callable[[str], str] = None,
        skip_author_verification: bool = False
) -> Tuple[str, str, List]:
    """Dispatch an incoming request to the protocol able to handle it.

    Returns a tuple of:

    - sender id
    - protocol name
    - list of entities

    NOTE! The returned sender is NOT necessarily the *author* of the entity. By sender here we're
    talking about the sender of the *request*. If this object is being relayed by the sender, the author
    could actually be a different identity.

    :arg request: Request object of type RequestType - note not a HTTP request even though the structure is similar
    :arg user: User that will be passed to `protocol.receive` (only required on private encrypted content)
        MUST have a `private_key` and `id` if given.
    :arg sender_key_fetcher: Function that accepts sender handle and returns public key (optional)
    :arg skip_author_verification: Don't verify sender (test purposes, false default)
    :returns: Tuple of sender id, protocol name and list of entity objects
    """
    logger.debug("handle_receive: processing request: %s", request)
    protocol_module = identify_protocol_by_request(request)
    protocol_name = protocol_module.PROTOCOL_NAME
    logger.debug("handle_receive: using protocol %s", protocol_name)
    sender, message = protocol_module.Protocol().receive(
        request,
        user,
        sender_key_fetcher,
        skip_author_verification=skip_author_verification,
    )
    logger.debug("handle_receive: sender %s, message %s", sender, message)
    # Entity mappers live in a per-protocol submodule, resolved lazily.
    mappers = importlib.import_module(
        "federation.entities.%s.mappers" % protocol_name)
    entities = mappers.message_to_objects(message, sender, sender_key_fetcher, user)
    logger.debug("handle_receive: entities %s", entities)
    return sender, protocol_name, entities
constant[Takes a request and passes it to the correct protocol.
Returns a tuple of:
- sender id
- protocol name
- list of entities
NOTE! The returned sender is NOT necessarily the *author* of the entity. By sender here we're
talking about the sender of the *request*. If this object is being relayed by the sender, the author
could actually be a different identity.
:arg request: Request object of type RequestType - note not a HTTP request even though the structure is similar
:arg user: User that will be passed to `protocol.receive` (only required on private encrypted content)
MUST have a `private_key` and `id` if given.
:arg sender_key_fetcher: Function that accepts sender handle and returns public key (optional)
:arg skip_author_verification: Don't verify sender (test purposes, false default)
:returns: Tuple of sender id, protocol name and list of entity objects
]
call[name[logger].debug, parameter[constant[handle_receive: processing request: %s], name[request]]]
variable[found_protocol] assign[=] call[name[identify_protocol_by_request], parameter[name[request]]]
call[name[logger].debug, parameter[constant[handle_receive: using protocol %s], name[found_protocol].PROTOCOL_NAME]]
variable[protocol] assign[=] call[name[found_protocol].Protocol, parameter[]]
<ast.Tuple object at 0x7da1b05e28f0> assign[=] call[name[protocol].receive, parameter[name[request], name[user], name[sender_key_fetcher]]]
call[name[logger].debug, parameter[constant[handle_receive: sender %s, message %s], name[sender], name[message]]]
variable[mappers] assign[=] call[name[importlib].import_module, parameter[binary_operation[constant[federation.entities.%s.mappers] <ast.Mod object at 0x7da2590d6920> name[found_protocol].PROTOCOL_NAME]]]
variable[entities] assign[=] call[name[mappers].message_to_objects, parameter[name[message], name[sender], name[sender_key_fetcher], name[user]]]
call[name[logger].debug, parameter[constant[handle_receive: entities %s], name[entities]]]
return[tuple[[<ast.Name object at 0x7da1b05e3df0>, <ast.Attribute object at 0x7da1b05e1b70>, <ast.Name object at 0x7da1b05e0670>]]] | keyword[def] identifier[handle_receive] (
identifier[request] : identifier[RequestType] ,
identifier[user] : identifier[UserType] = keyword[None] ,
identifier[sender_key_fetcher] : identifier[Callable] [[ identifier[str] ], identifier[str] ]= keyword[None] ,
identifier[skip_author_verification] : identifier[bool] = keyword[False]
)-> identifier[Tuple] [ identifier[str] , identifier[str] , identifier[List] ]:
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[request] )
identifier[found_protocol] = identifier[identify_protocol_by_request] ( identifier[request] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[found_protocol] . identifier[PROTOCOL_NAME] )
identifier[protocol] = identifier[found_protocol] . identifier[Protocol] ()
identifier[sender] , identifier[message] = identifier[protocol] . identifier[receive] (
identifier[request] , identifier[user] , identifier[sender_key_fetcher] , identifier[skip_author_verification] = identifier[skip_author_verification] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[sender] , identifier[message] )
identifier[mappers] = identifier[importlib] . identifier[import_module] ( literal[string] % identifier[found_protocol] . identifier[PROTOCOL_NAME] )
identifier[entities] = identifier[mappers] . identifier[message_to_objects] ( identifier[message] , identifier[sender] , identifier[sender_key_fetcher] , identifier[user] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[entities] )
keyword[return] identifier[sender] , identifier[found_protocol] . identifier[PROTOCOL_NAME] , identifier[entities] | def handle_receive(request: RequestType, user: UserType=None, sender_key_fetcher: Callable[[str], str]=None, skip_author_verification: bool=False) -> Tuple[str, str, List]:
"""Takes a request and passes it to the correct protocol.
Returns a tuple of:
- sender id
- protocol name
- list of entities
NOTE! The returned sender is NOT necessarily the *author* of the entity. By sender here we're
talking about the sender of the *request*. If this object is being relayed by the sender, the author
could actually be a different identity.
:arg request: Request object of type RequestType - note not a HTTP request even though the structure is similar
:arg user: User that will be passed to `protocol.receive` (only required on private encrypted content)
MUST have a `private_key` and `id` if given.
:arg sender_key_fetcher: Function that accepts sender handle and returns public key (optional)
:arg skip_author_verification: Don't verify sender (test purposes, false default)
:returns: Tuple of sender id, protocol name and list of entity objects
"""
logger.debug('handle_receive: processing request: %s', request)
found_protocol = identify_protocol_by_request(request)
logger.debug('handle_receive: using protocol %s', found_protocol.PROTOCOL_NAME)
protocol = found_protocol.Protocol()
(sender, message) = protocol.receive(request, user, sender_key_fetcher, skip_author_verification=skip_author_verification)
logger.debug('handle_receive: sender %s, message %s', sender, message)
mappers = importlib.import_module('federation.entities.%s.mappers' % found_protocol.PROTOCOL_NAME)
entities = mappers.message_to_objects(message, sender, sender_key_fetcher, user)
logger.debug('handle_receive: entities %s', entities)
return (sender, found_protocol.PROTOCOL_NAME, entities) |
def center(self):
    """center() -> (x, y)

    Return the centroid (center of mass) of this Polygon as a Point,
    using the standard shoelace-based centroid formula.

    See http://en.wikipedia.org/wiki/Polygon

    Examples:

    >>> p = Polygon()
    >>> p.vertices = [ Point(3, 8), Point(6, 4), Point(0, 3) ]
    >>> p.center()
    Point(2.89285714286, 4.82142857143)
    """
    scale = 6.0 * self.area()
    cx = 0.0
    cy = 0.0
    for edge in self.segments():
        # Cross product of consecutive vertices weights each edge's
        # contribution to the centroid.
        cross = (edge.p.x * edge.q.y) - (edge.q.x * edge.p.y)
        cx += (edge.p.x + edge.q.x) * cross
        cy += (edge.p.y + edge.q.y) * cross
    return Point(cx / scale, cy / scale)
constant[center() -> (x, y)
Returns the center (of mass) point of this Polygon.
See http://en.wikipedia.org/wiki/Polygon
Examples:
>>> p = Polygon()
>>> p.vertices = [ Point(3, 8), Point(6, 4), Point(0, 3) ]
>>> p.center()
Point(2.89285714286, 4.82142857143)
]
variable[Cx] assign[=] constant[0.0]
variable[Cy] assign[=] constant[0.0]
variable[denom] assign[=] binary_operation[constant[6.0] * call[name[self].area, parameter[]]]
for taget[name[segment]] in starred[call[name[self].segments, parameter[]]] begin[:]
variable[x] assign[=] binary_operation[name[segment].p.x + name[segment].q.x]
variable[y] assign[=] binary_operation[name[segment].p.y + name[segment].q.y]
variable[xy] assign[=] binary_operation[binary_operation[name[segment].p.x * name[segment].q.y] - binary_operation[name[segment].q.x * name[segment].p.y]]
<ast.AugAssign object at 0x7da18eb57250>
<ast.AugAssign object at 0x7da18eb54640>
<ast.AugAssign object at 0x7da18eb57520>
<ast.AugAssign object at 0x7da18eb55c60>
return[call[name[Point], parameter[name[Cx], name[Cy]]]] | keyword[def] identifier[center] ( identifier[self] ):
literal[string]
identifier[Cx] = literal[int]
identifier[Cy] = literal[int]
identifier[denom] = literal[int] * identifier[self] . identifier[area] ()
keyword[for] identifier[segment] keyword[in] identifier[self] . identifier[segments] ():
identifier[x] =( identifier[segment] . identifier[p] . identifier[x] + identifier[segment] . identifier[q] . identifier[x] )
identifier[y] =( identifier[segment] . identifier[p] . identifier[y] + identifier[segment] . identifier[q] . identifier[y] )
identifier[xy] =( identifier[segment] . identifier[p] . identifier[x] * identifier[segment] . identifier[q] . identifier[y] )-( identifier[segment] . identifier[q] . identifier[x] * identifier[segment] . identifier[p] . identifier[y] )
identifier[Cx] +=( identifier[x] * identifier[xy] )
identifier[Cy] +=( identifier[y] * identifier[xy] )
identifier[Cx] /= identifier[denom]
identifier[Cy] /= identifier[denom]
keyword[return] identifier[Point] ( identifier[Cx] , identifier[Cy] ) | def center(self):
"""center() -> (x, y)
Returns the center (of mass) point of this Polygon.
See http://en.wikipedia.org/wiki/Polygon
Examples:
>>> p = Polygon()
>>> p.vertices = [ Point(3, 8), Point(6, 4), Point(0, 3) ]
>>> p.center()
Point(2.89285714286, 4.82142857143)
"""
Cx = 0.0
Cy = 0.0
denom = 6.0 * self.area()
for segment in self.segments():
x = segment.p.x + segment.q.x
y = segment.p.y + segment.q.y
xy = segment.p.x * segment.q.y - segment.q.x * segment.p.y
Cx += x * xy
Cy += y * xy # depends on [control=['for'], data=['segment']]
Cx /= denom
Cy /= denom
return Point(Cx, Cy) |
def add_repo_to_team(self, auth, team_id, repo_name):
    """
    Attach a repository to a team (or update the existing association).

    :param auth.Authentication auth: admin-level authentication object
    :param str team_id: id of the team
    :param str repo_name: name of the repository to add to the team

    :raises NetworkFailure: if there is an error communicating with the server
    :raises ApiFailure: if the request cannot be serviced
    """
    self.put(
        "/admin/teams/{t}/repos/{r}".format(t=team_id, r=repo_name),
        auth=auth,
    )
constant[
Add or update repo from team.
:param auth.Authentication auth: authentication object, must be admin-level
:param str team_id: Team's id
:param str repo_name: Name of the repo to be added to the team
:raises NetworkFailure: if there is an error communicating with the server
:raises ApiFailure: if the request cannot be serviced
]
variable[url] assign[=] call[constant[/admin/teams/{t}/repos/{r}].format, parameter[]]
call[name[self].put, parameter[name[url]]] | keyword[def] identifier[add_repo_to_team] ( identifier[self] , identifier[auth] , identifier[team_id] , identifier[repo_name] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[t] = identifier[team_id] , identifier[r] = identifier[repo_name] )
identifier[self] . identifier[put] ( identifier[url] , identifier[auth] = identifier[auth] ) | def add_repo_to_team(self, auth, team_id, repo_name):
"""
Add or update repo from team.
:param auth.Authentication auth: authentication object, must be admin-level
:param str team_id: Team's id
:param str repo_name: Name of the repo to be added to the team
:raises NetworkFailure: if there is an error communicating with the server
:raises ApiFailure: if the request cannot be serviced
"""
url = '/admin/teams/{t}/repos/{r}'.format(t=team_id, r=repo_name)
self.put(url, auth=auth) |
def unit_conversion(current, desired):
    """
    Compute the multiplier converting one unit system to another.

    Parameters
    ---------
    current : str
      Unit system values are in now (eg 'millimeters')
    desired : str
      Unit system we'd like values in (eg 'inches')

    Returns
    ---------
    conversion : float
      Number to multiply by to put values into desired units
    """
    # Normalize both unit names before looking them up in the table.
    current, desired = (str(unit).strip().lower()
                        for unit in (current, desired))
    return TO_INCH[current] / TO_INCH[desired]
constant[
Calculate the conversion from one set of units to another.
Parameters
---------
current : str
Unit system values are in now (eg 'millimeters')
desired : str
Unit system we'd like values in (eg 'inches')
Returns
---------
conversion : float
Number to multiply by to put values into desired units
]
variable[current] assign[=] call[call[call[name[str], parameter[name[current]]].strip, parameter[]].lower, parameter[]]
variable[desired] assign[=] call[call[call[name[str], parameter[name[desired]]].strip, parameter[]].lower, parameter[]]
variable[conversion] assign[=] binary_operation[call[name[TO_INCH]][name[current]] / call[name[TO_INCH]][name[desired]]]
return[name[conversion]] | keyword[def] identifier[unit_conversion] ( identifier[current] , identifier[desired] ):
literal[string]
identifier[current] = identifier[str] ( identifier[current] ). identifier[strip] (). identifier[lower] ()
identifier[desired] = identifier[str] ( identifier[desired] ). identifier[strip] (). identifier[lower] ()
identifier[conversion] = identifier[TO_INCH] [ identifier[current] ]/ identifier[TO_INCH] [ identifier[desired] ]
keyword[return] identifier[conversion] | def unit_conversion(current, desired):
"""
Calculate the conversion from one set of units to another.
Parameters
---------
current : str
Unit system values are in now (eg 'millimeters')
desired : str
Unit system we'd like values in (eg 'inches')
Returns
---------
conversion : float
Number to multiply by to put values into desired units
"""
current = str(current).strip().lower()
desired = str(desired).strip().lower()
conversion = TO_INCH[current] / TO_INCH[desired]
return conversion |
def has_overlap(self, interval: 'Interval') -> bool:
    """Check if self has overlap with `interval`.

    Two intervals overlap exactly when each one begins before the other
    ends; intervals that merely touch at an endpoint do not overlap.

    Args:
        interval: interval to be examined

    Returns:
        bool: True if self has overlap with `interval` otherwise False
    """
    # Return the comparison directly instead of branching to True/False.
    return self.begin < interval.end and interval.begin < self.end
constant[Check if self has overlap with `interval`.
Args:
interval: interval to be examined
Returns:
bool: True if self has overlap with `interval` otherwise False
]
if <ast.BoolOp object at 0x7da1b0531ab0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[has_overlap] ( identifier[self] , identifier[interval] : literal[string] )-> identifier[bool] :
literal[string]
keyword[if] identifier[self] . identifier[begin] < identifier[interval] . identifier[end] keyword[and] identifier[interval] . identifier[begin] < identifier[self] . identifier[end] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def has_overlap(self, interval: 'Interval') -> bool:
"""Check if self has overlap with `interval`.
Args:
interval: interval to be examined
Returns:
bool: True if self has overlap with `interval` otherwise False
"""
if self.begin < interval.end and interval.begin < self.end:
return True # depends on [control=['if'], data=[]]
return False |
def update_or_create_candidate(
    self, candidate, aggregable=True, uncontested=False
):
    """Create a CandidateElection."""
    defaults = {"aggregable": aggregable, "uncontested": uncontested}
    # update_or_create returns (object, created); only the object matters.
    candidate_election, _ = CandidateElection.objects.update_or_create(
        candidate=candidate,
        election=self,
        defaults=defaults,
    )
    return candidate_election
constant[Create a CandidateElection.]
<ast.Tuple object at 0x7da1b1d4c400> assign[=] call[name[CandidateElection].objects.update_or_create, parameter[]]
return[name[candidate_election]] | keyword[def] identifier[update_or_create_candidate] (
identifier[self] , identifier[candidate] , identifier[aggregable] = keyword[True] , identifier[uncontested] = keyword[False]
):
literal[string]
identifier[candidate_election] , identifier[c] = identifier[CandidateElection] . identifier[objects] . identifier[update_or_create] (
identifier[candidate] = identifier[candidate] ,
identifier[election] = identifier[self] ,
identifier[defaults] ={ literal[string] : identifier[aggregable] , literal[string] : identifier[uncontested] },
)
keyword[return] identifier[candidate_election] | def update_or_create_candidate(self, candidate, aggregable=True, uncontested=False):
"""Create a CandidateElection."""
(candidate_election, c) = CandidateElection.objects.update_or_create(candidate=candidate, election=self, defaults={'aggregable': aggregable, 'uncontested': uncontested})
return candidate_election |
def checkifDownloadExist(self,username,password,download , name):
    """Check whether a Landsat scene download URL is reachable.

    Sends an HTTP Basic-auth request to ``download``; if the server
    answers, an empty placeholder file ``<zip_dir>/<name>.tgz`` is
    created so later steps can detect the scene.

    Arguments:
        username - account name used for Basic authentication
        password - account password
        download - full URL of the scene archive to probe
        name - scene id without extension, e.g. LT81360082013127LGN01

    Returns:
        True if the URL is reachable and the placeholder file could be
        created, False on an HTTP error or a failed file creation.
    """
    try:
        request = urllib2.Request(download)
        # urllib2 does not send credentials pre-emptively, so build the
        # Basic auth header by hand (strip the trailing newline that
        # base64.encodestring appends).
        base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)
        urllib2.urlopen(request)
    except urllib2.HTTPError:
        return False
    try:
        # Touch an empty placeholder file for the scene.
        f = open(self.zip_dir + '/' + name + '.tgz', 'wb')
        f.close()
        return True
    except (IOError, OSError):
        # The original caught urllib2.HTTPError here, which open() can
        # never raise; a failed file creation now correctly returns False.
        return False
constant[ Download single image from Landsat on Google Storage
Arguments:
row - string in this format xxx, e.g. 003
path - string in this format xxx, e.g. 003
name - zip file name without .tar.bz e.g. LT81360082013127LGN01
sat_type - e.g. L7, L8, ...
]
<ast.Try object at 0x7da1b1ff8250> | keyword[def] identifier[checkifDownloadExist] ( identifier[self] , identifier[username] , identifier[password] , identifier[download] , identifier[name] ):
literal[string]
keyword[try] :
identifier[request] = identifier[urllib2] . identifier[Request] ( identifier[download] )
identifier[base64string] = identifier[base64] . identifier[encodestring] ( literal[string] %( identifier[username] , identifier[password] )). identifier[replace] ( literal[string] , literal[string] )
identifier[request] . identifier[add_header] ( literal[string] , literal[string] % identifier[base64string] )
identifier[result] = identifier[urllib2] . identifier[urlopen] ( identifier[request] )
keyword[try] :
identifier[f] = identifier[open] ( identifier[self] . identifier[zip_dir] + literal[string] + identifier[name] + literal[string] , literal[string] )
identifier[f] . identifier[close] ()
keyword[return] keyword[True]
keyword[except] identifier[urllib2] . identifier[HTTPError] :
keyword[return] keyword[False]
keyword[except] identifier[urllib2] . identifier[HTTPError] :
keyword[return] keyword[False] | def checkifDownloadExist(self, username, password, download, name):
""" Download single image from Landsat on Google Storage
Arguments:
row - string in this format xxx, e.g. 003
path - string in this format xxx, e.g. 003
name - zip file name without .tar.bz e.g. LT81360082013127LGN01
sat_type - e.g. L7, L8, ...
"""
try:
request = urllib2.Request(download)
base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
request.add_header('Authorization', 'Basic %s' % base64string)
result = urllib2.urlopen(request)
try:
f = open(self.zip_dir + '/' + name + '.tgz', 'wb')
f.close()
return True # depends on [control=['try'], data=[]]
except urllib2.HTTPError:
return False # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except urllib2.HTTPError:
return False # depends on [control=['except'], data=[]] |
def process_decoded(self, d):
    """
    Recursive method to support decoding dicts and lists containing
    pymatgen objects.

    Dicts carrying "@module"/"@class" metadata are re-instantiated via
    their class's ``from_dict`` (with special cases for datetime, numpy
    arrays and bson ObjectIds); all other containers are returned with
    recursively decoded members, and primitives pass through unchanged.
    """
    if isinstance(d, dict):
        # MSONable-style payloads record their origin under @module/@class.
        if "@module" in d and "@class" in d:
            modname = d["@module"]
            classname = d["@class"]
        else:
            modname = None
            classname = None
        if modname and modname not in ["bson.objectid", "numpy"]:
            if modname == "datetime" and classname == "datetime":
                try:
                    # Timestamps are serialized as strings; try the
                    # microsecond format first, then fall back to the
                    # whole-second format.
                    dt = datetime.datetime.strptime(d["string"],
                                                    "%Y-%m-%d %H:%M:%S.%f")
                except ValueError:
                    dt = datetime.datetime.strptime(d["string"],
                                                    "%Y-%m-%d %H:%M:%S")
                return dt

            mod = __import__(modname, globals(), locals(), [classname], 0)
            if hasattr(mod, classname):
                cls_ = getattr(mod, classname)
                # Strip the @-prefixed metadata keys before rebuilding.
                data = {k: v for k, v in d.items()
                        if not k.startswith("@")}
                if hasattr(cls_, "from_dict"):
                    return cls_.from_dict(data)
        # np / bson are optional dependencies guarded at import time
        # (presumably module-level try-imports — verify at file top).
        elif np is not None and modname == "numpy" and classname == \
                "array":
            return np.array(d["data"], dtype=d["dtype"])
        elif (bson is not None) and modname == "bson.objectid" and \
                classname == "ObjectId":
            return bson.objectid.ObjectId(d["oid"])

        # No recognised class: decode keys and values recursively.
        return {self.process_decoded(k): self.process_decoded(v)
                for k, v in d.items()}
    elif isinstance(d, list):
        return [self.process_decoded(x) for x in d]

    # Primitives (str, int, float, bool, None) pass through unchanged.
    return d
constant[
Recursive method to support decoding dicts and lists containing
pymatgen objects.
]
if call[name[isinstance], parameter[name[d], name[dict]]] begin[:]
if <ast.BoolOp object at 0x7da1b13599f0> begin[:]
variable[modname] assign[=] call[name[d]][constant[@module]]
variable[classname] assign[=] call[name[d]][constant[@class]]
if <ast.BoolOp object at 0x7da1b1358b80> begin[:]
if <ast.BoolOp object at 0x7da1b1359390> begin[:]
<ast.Try object at 0x7da1b13580d0>
return[name[dt]]
variable[mod] assign[=] call[name[__import__], parameter[name[modname], call[name[globals], parameter[]], call[name[locals], parameter[]], list[[<ast.Name object at 0x7da1b135afb0>]], constant[0]]]
if call[name[hasattr], parameter[name[mod], name[classname]]] begin[:]
variable[cls_] assign[=] call[name[getattr], parameter[name[mod], name[classname]]]
variable[data] assign[=] <ast.DictComp object at 0x7da1b135a8f0>
if call[name[hasattr], parameter[name[cls_], constant[from_dict]]] begin[:]
return[call[name[cls_].from_dict, parameter[name[data]]]]
return[<ast.DictComp object at 0x7da1b1359720>]
return[name[d]] | keyword[def] identifier[process_decoded] ( identifier[self] , identifier[d] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] ):
keyword[if] literal[string] keyword[in] identifier[d] keyword[and] literal[string] keyword[in] identifier[d] :
identifier[modname] = identifier[d] [ literal[string] ]
identifier[classname] = identifier[d] [ literal[string] ]
keyword[else] :
identifier[modname] = keyword[None]
identifier[classname] = keyword[None]
keyword[if] identifier[modname] keyword[and] identifier[modname] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[modname] == literal[string] keyword[and] identifier[classname] == literal[string] :
keyword[try] :
identifier[dt] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[d] [ literal[string] ],
literal[string] )
keyword[except] identifier[ValueError] :
identifier[dt] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[d] [ literal[string] ],
literal[string] )
keyword[return] identifier[dt]
identifier[mod] = identifier[__import__] ( identifier[modname] , identifier[globals] (), identifier[locals] (),[ identifier[classname] ], literal[int] )
keyword[if] identifier[hasattr] ( identifier[mod] , identifier[classname] ):
identifier[cls_] = identifier[getattr] ( identifier[mod] , identifier[classname] )
identifier[data] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] ()
keyword[if] keyword[not] identifier[k] . identifier[startswith] ( literal[string] )}
keyword[if] identifier[hasattr] ( identifier[cls_] , literal[string] ):
keyword[return] identifier[cls_] . identifier[from_dict] ( identifier[data] )
keyword[elif] identifier[np] keyword[is] keyword[not] keyword[None] keyword[and] identifier[modname] == literal[string] keyword[and] identifier[classname] == literal[string] :
keyword[return] identifier[np] . identifier[array] ( identifier[d] [ literal[string] ], identifier[dtype] = identifier[d] [ literal[string] ])
keyword[elif] ( identifier[bson] keyword[is] keyword[not] keyword[None] ) keyword[and] identifier[modname] == literal[string] keyword[and] identifier[classname] == literal[string] :
keyword[return] identifier[bson] . identifier[objectid] . identifier[ObjectId] ( identifier[d] [ literal[string] ])
keyword[return] { identifier[self] . identifier[process_decoded] ( identifier[k] ): identifier[self] . identifier[process_decoded] ( identifier[v] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] ()}
keyword[elif] identifier[isinstance] ( identifier[d] , identifier[list] ):
keyword[return] [ identifier[self] . identifier[process_decoded] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[d] ]
keyword[return] identifier[d] | def process_decoded(self, d):
"""
Recursive method to support decoding dicts and lists containing
pymatgen objects.
"""
if isinstance(d, dict):
if '@module' in d and '@class' in d:
modname = d['@module']
classname = d['@class'] # depends on [control=['if'], data=[]]
else:
modname = None
classname = None
if modname and modname not in ['bson.objectid', 'numpy']:
if modname == 'datetime' and classname == 'datetime':
try:
dt = datetime.datetime.strptime(d['string'], '%Y-%m-%d %H:%M:%S.%f') # depends on [control=['try'], data=[]]
except ValueError:
dt = datetime.datetime.strptime(d['string'], '%Y-%m-%d %H:%M:%S') # depends on [control=['except'], data=[]]
return dt # depends on [control=['if'], data=[]]
mod = __import__(modname, globals(), locals(), [classname], 0)
if hasattr(mod, classname):
cls_ = getattr(mod, classname)
data = {k: v for (k, v) in d.items() if not k.startswith('@')}
if hasattr(cls_, 'from_dict'):
return cls_.from_dict(data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif np is not None and modname == 'numpy' and (classname == 'array'):
return np.array(d['data'], dtype=d['dtype']) # depends on [control=['if'], data=[]]
elif bson is not None and modname == 'bson.objectid' and (classname == 'ObjectId'):
return bson.objectid.ObjectId(d['oid']) # depends on [control=['if'], data=[]]
return {self.process_decoded(k): self.process_decoded(v) for (k, v) in d.items()} # depends on [control=['if'], data=[]]
elif isinstance(d, list):
return [self.process_decoded(x) for x in d] # depends on [control=['if'], data=[]]
return d |
def session_scope(self):
    """Provide a transactional scope around a series of operations.

    Yields a session obtained from ``self.session()``. On a clean exit
    the session is committed; if the body raises, the session is rolled
    back and the exception is re-raised. The session is always closed.

    Yields
    ------
    session
        The active database session for the duration of the block.
    """
    session = self.session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        # Bare ``raise`` re-raises with the original traceback intact,
        # unlike ``raise e`` which re-raises from this frame.
        raise
    finally:
        session.close()
constant[Provide a transactional scope around a series of operations.]
variable[session] assign[=] call[name[self].session, parameter[]]
<ast.Try object at 0x7da1b1f37fd0> | keyword[def] identifier[session_scope] ( identifier[self] ):
literal[string]
identifier[session] = identifier[self] . identifier[session] ()
keyword[try] :
keyword[yield] identifier[session]
identifier[session] . identifier[commit] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[session] . identifier[rollback] ()
keyword[raise] identifier[e]
keyword[finally] :
identifier[session] . identifier[close] () | def session_scope(self):
"""Provide a transactional scope around a series of operations."""
session = self.session()
try:
yield session
session.commit() # depends on [control=['try'], data=[]]
except Exception as e:
session.rollback()
raise e # depends on [control=['except'], data=['e']]
finally:
session.close() |
def get_obj(path):
    """Resolve a dotted path like 'os.path.exists' to the object it names.

    'os' or 'os.path' resolve to a module; 'os.path.exists' resolves to
    the function inside that module. Anything that is not a str is
    returned unchanged (duck typing is deliberately avoided because
    mocks answer every attribute).

    Note: Relative imports not supported.
    Raises ImportError or AttributeError as appropriate.
    """
    if not isinstance(path, str):
        return path
    if path.startswith('.'):
        raise TypeError('relative imports are not supported')
    segments = path.split('.')
    obj = importlib.import_module(segments[0])
    # A plain reduce would do, but walking step by step lets us report
    # exactly which prefix of the path went wrong.
    for depth, attr in enumerate(segments[1:]):
        try:
            obj = getattr(obj, attr)
        except AttributeError:
            # Dotted prefix up to (but excluding) the failing attribute,
            # i.e. the part of the path that resolved successfully.
            prefix = '.'.join(segments[:depth + 1])
            try:
                importlib.import_module(prefix)
            except ImportError:
                raise AttributeError(
                    "object '%s' has no attribute '%s'" % (prefix, attr))
            else:
                raise AttributeError(
                    "module '%s' has no attribute '%s'" % (prefix, attr))
    return obj
constant[Return obj for given dotted path.
Typical inputs for `path` are 'os' or 'os.path' in which case you get a
module; or 'os.path.exists' in which case you get a function from that
module.
Just returns the given input in case it is not a str.
Note: Relative imports not supported.
Raises ImportError or AttributeError as appropriate.
]
if <ast.UnaryOp object at 0x7da20c6e7940> begin[:]
return[name[path]]
if call[name[path].startswith, parameter[constant[.]]] begin[:]
<ast.Raise object at 0x7da20c6e66e0>
variable[parts] assign[=] call[name[path].split, parameter[constant[.]]]
<ast.Tuple object at 0x7da20c6e5ab0> assign[=] tuple[[<ast.Subscript object at 0x7da20c6e7cd0>, <ast.Subscript object at 0x7da20c6e4c10>]]
variable[obj] assign[=] call[name[importlib].import_module, parameter[name[head]]]
for taget[tuple[[<ast.Name object at 0x7da20c6e78e0>, <ast.Name object at 0x7da20c6e7820>]]] in starred[call[name[enumerate], parameter[name[tail]]]] begin[:]
<ast.Try object at 0x7da20c6e60b0>
return[name[obj]] | keyword[def] identifier[get_obj] ( identifier[path] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[path] , identifier[str] ):
keyword[return] identifier[path]
keyword[if] identifier[path] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[parts] = identifier[path] . identifier[split] ( literal[string] )
identifier[head] , identifier[tail] = identifier[parts] [ literal[int] ], identifier[parts] [ literal[int] :]
identifier[obj] = identifier[importlib] . identifier[import_module] ( identifier[head] )
keyword[for] identifier[i] , identifier[name] keyword[in] identifier[enumerate] ( identifier[tail] ):
keyword[try] :
identifier[obj] = identifier[getattr] ( identifier[obj] , identifier[name] )
keyword[except] identifier[AttributeError] :
identifier[module] = literal[string] . identifier[join] ([ identifier[head] ]+ identifier[tail] [: identifier[i] ])
keyword[try] :
identifier[importlib] . identifier[import_module] ( identifier[module] )
keyword[except] identifier[ImportError] :
keyword[raise] identifier[AttributeError] (
literal[string] %( identifier[module] , identifier[name] ))
keyword[else] :
keyword[raise] identifier[AttributeError] (
literal[string] %( identifier[module] , identifier[name] ))
keyword[return] identifier[obj] | def get_obj(path):
"""Return obj for given dotted path.
Typical inputs for `path` are 'os' or 'os.path' in which case you get a
module; or 'os.path.exists' in which case you get a function from that
module.
Just returns the given input in case it is not a str.
Note: Relative imports not supported.
Raises ImportError or AttributeError as appropriate.
"""
# Since we usually pass in mocks here; duck typing is not appropriate
# (mocks respond to every attribute).
if not isinstance(path, str):
return path # depends on [control=['if'], data=[]]
if path.startswith('.'):
raise TypeError('relative imports are not supported') # depends on [control=['if'], data=[]]
parts = path.split('.')
(head, tail) = (parts[0], parts[1:])
obj = importlib.import_module(head)
# Normally a simple reduce, but we go the extra mile
# for good exception messages.
for (i, name) in enumerate(tail):
try:
obj = getattr(obj, name) # depends on [control=['try'], data=[]]
except AttributeError:
# Note the [:i] instead of [:i+1], so we get the path just
# *before* the AttributeError, t.i. the part of it that went ok.
module = '.'.join([head] + tail[:i])
try:
importlib.import_module(module) # depends on [control=['try'], data=[]]
except ImportError:
raise AttributeError("object '%s' has no attribute '%s'" % (module, name)) # depends on [control=['except'], data=[]]
else:
raise AttributeError("module '%s' has no attribute '%s'" % (module, name)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
return obj |
def keypairs(cls, data, columns=None, use_index=False, name=None):
    """This will format the data as Key: Value pairs, rather than the
    idx/col/val style. This is useful for some transforms, and to
    key choropleth map data

    Standard Data Types:
    List: [0, 10, 20, 30, 40]
    Paired Tuples: ((0, 1), (0, 2), (0, 3))
    Dict: {'A': 10, 'B': 20, 'C': 30, 'D': 40, 'E': 50}

    Plus Pandas DataFrame and Series, and Numpy ndarray

    Parameters
    ----------
    data:
        List, Tuple, Dict, Pandas Series/DataFrame, Numpy ndarray
    columns: list, default None
        If passing Pandas DataFrame, you must pass at least one column
        name. If one column is passed, x-values will default to the index
        values. If two column names are passed, x-values are columns[0],
        y-values columns[1].
    use_index: boolean, default False
        Use the DataFrame index for your x-values
    name: string, default None
        Name for the resulting data set; falls back to 'table'.

    Raises
    ------
    ValueError
        If ``use_index`` is combined with more than one column name.
    TypeError
        If ``data`` is not one of the supported types.
    """
    if not name:
        name = 'table'
    cls.raw_data = data
    # Tuples of (x, y) pairs
    if isinstance(data, tuple):
        values = [{"x": x[0], "y": x[1]} for x in data]
    # Lists: x-values are the element positions
    elif isinstance(data, list):
        values = [{"x": x, "y": y}
                  for x, y in zip(range(len(data) + 1), data)]
    # Dicts and Series: sorted by key for deterministic output
    elif isinstance(data, (dict, pd.Series)):
        values = [{"x": x, "y": y} for x, y in sorted(data.items())]
    # Dataframes
    elif isinstance(data, pd.DataFrame):
        if len(columns) > 1 and use_index:
            raise ValueError('If using index as x-axis, len(columns) '
                             'cannot be > 1')
        if use_index or len(columns) == 1:
            values = [{"x": cls.serialize(x[0]),
                       "y": cls.serialize(x[1][columns[0]])}
                      for x in data.iterrows()]
        else:
            values = [{"x": cls.serialize(x[1][columns[0]]),
                       "y": cls.serialize(x[1][columns[1]])}
                      for x in data.iterrows()]
    # NumPy arrays
    elif isinstance(data, np.ndarray):
        values = cls._numpy_to_values(data)
    else:
        raise TypeError('unknown data type %s' % type(data))
    return cls(name, values=values)
constant[This will format the data as Key: Value pairs, rather than the
idx/col/val style. This is useful for some transforms, and to
key choropleth map data
Standard Data Types:
List: [0, 10, 20, 30, 40]
Paired Tuples: ((0, 1), (0, 2), (0, 3))
Dict: {'A': 10, 'B': 20, 'C': 30, 'D': 40, 'E': 50}
Plus Pandas DataFrame and Series, and Numpy ndarray
Parameters
----------
data:
List, Tuple, Dict, Pandas Series/DataFrame, Numpy ndarray
columns: list, default None
If passing Pandas DataFrame, you must pass at least one column
name.If one column is passed, x-values will default to the index
values.If two column names are passed, x-values are columns[0],
y-values columns[1].
use_index: boolean, default False
Use the DataFrame index for your x-values
]
if <ast.UnaryOp object at 0x7da18eb55210> begin[:]
variable[name] assign[=] constant[table]
name[cls].raw_data assign[=] name[data]
if call[name[isinstance], parameter[name[data], name[tuple]]] begin[:]
variable[values] assign[=] <ast.ListComp object at 0x7da18eb57130>
return[call[name[cls], parameter[name[name]]]] | keyword[def] identifier[keypairs] ( identifier[cls] , identifier[data] , identifier[columns] = keyword[None] , identifier[use_index] = keyword[False] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[name] :
identifier[name] = literal[string]
identifier[cls] . identifier[raw_data] = identifier[data]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[tuple] ):
identifier[values] =[{ literal[string] : identifier[x] [ literal[int] ], literal[string] : identifier[x] [ literal[int] ]} keyword[for] identifier[x] keyword[in] identifier[data] ]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[list] ):
identifier[values] =[{ literal[string] : identifier[x] , literal[string] : identifier[y] }
keyword[for] identifier[x] , identifier[y] keyword[in] identifier[zip] ( identifier[range] ( identifier[len] ( identifier[data] )+ literal[int] ), identifier[data] )]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[dict] ) keyword[or] identifier[isinstance] ( identifier[data] , identifier[pd] . identifier[Series] ):
identifier[values] =[{ literal[string] : identifier[x] , literal[string] : identifier[y] } keyword[for] identifier[x] , identifier[y] keyword[in] identifier[sorted] ( identifier[data] . identifier[items] ())]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[pd] . identifier[DataFrame] ):
keyword[if] identifier[len] ( identifier[columns] )> literal[int] keyword[and] identifier[use_index] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[use_index] keyword[or] identifier[len] ( identifier[columns] )== literal[int] :
identifier[values] =[{ literal[string] : identifier[cls] . identifier[serialize] ( identifier[x] [ literal[int] ]),
literal[string] : identifier[cls] . identifier[serialize] ( identifier[x] [ literal[int] ][ identifier[columns] [ literal[int] ]])}
keyword[for] identifier[x] keyword[in] identifier[data] . identifier[iterrows] ()]
keyword[else] :
identifier[values] =[{ literal[string] : identifier[cls] . identifier[serialize] ( identifier[x] [ literal[int] ][ identifier[columns] [ literal[int] ]]),
literal[string] : identifier[cls] . identifier[serialize] ( identifier[x] [ literal[int] ][ identifier[columns] [ literal[int] ]])}
keyword[for] identifier[x] keyword[in] identifier[data] . identifier[iterrows] ()]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[np] . identifier[ndarray] ):
identifier[values] = identifier[cls] . identifier[_numpy_to_values] ( identifier[data] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[data] ))
keyword[return] identifier[cls] ( identifier[name] , identifier[values] = identifier[values] ) | def keypairs(cls, data, columns=None, use_index=False, name=None):
"""This will format the data as Key: Value pairs, rather than the
idx/col/val style. This is useful for some transforms, and to
key choropleth map data
Standard Data Types:
List: [0, 10, 20, 30, 40]
Paired Tuples: ((0, 1), (0, 2), (0, 3))
Dict: {'A': 10, 'B': 20, 'C': 30, 'D': 40, 'E': 50}
Plus Pandas DataFrame and Series, and Numpy ndarray
Parameters
----------
data:
List, Tuple, Dict, Pandas Series/DataFrame, Numpy ndarray
columns: list, default None
If passing Pandas DataFrame, you must pass at least one column
name.If one column is passed, x-values will default to the index
values.If two column names are passed, x-values are columns[0],
y-values columns[1].
use_index: boolean, default False
Use the DataFrame index for your x-values
"""
if not name:
name = 'table' # depends on [control=['if'], data=[]]
cls.raw_data = data
# Tuples
if isinstance(data, tuple):
values = [{'x': x[0], 'y': x[1]} for x in data] # depends on [control=['if'], data=[]]
# Lists
elif isinstance(data, list):
values = [{'x': x, 'y': y} for (x, y) in zip(range(len(data) + 1), data)] # depends on [control=['if'], data=[]]
# Dicts
elif isinstance(data, dict) or isinstance(data, pd.Series):
values = [{'x': x, 'y': y} for (x, y) in sorted(data.items())] # depends on [control=['if'], data=[]]
# Dataframes
elif isinstance(data, pd.DataFrame):
if len(columns) > 1 and use_index:
raise ValueError('If using index as x-axis, len(columns)cannot be > 1') # depends on [control=['if'], data=[]]
if use_index or len(columns) == 1:
values = [{'x': cls.serialize(x[0]), 'y': cls.serialize(x[1][columns[0]])} for x in data.iterrows()] # depends on [control=['if'], data=[]]
else:
values = [{'x': cls.serialize(x[1][columns[0]]), 'y': cls.serialize(x[1][columns[1]])} for x in data.iterrows()] # depends on [control=['if'], data=[]]
# NumPy arrays
elif isinstance(data, np.ndarray):
values = cls._numpy_to_values(data) # depends on [control=['if'], data=[]]
else:
raise TypeError('unknown data type %s' % type(data))
return cls(name, values=values) |
def get(self, bounce_id, api_key=None, secure=None, test=None,
        **request_args):
    '''Fetch the data for one bounce from the Postmark API.

    :param bounce_id: A bounce's ID retrieved with :class:`Bounces`.
    :param api_key: Your Postmark API key. Defaults to `self.api_key`.
    :param secure: Use the https scheme for Postmark API.
        Defaults to `self.secure`.
    :param test: Make a test request to the Postmark API.
        Defaults to `self.test`.
    :param \*\*request_args: Keyword args to pass to
        :func:`requests.request`.
    :rtype: :class:`BounceResponse`
    '''
    # Build the bounce-specific endpoint, then the request headers,
    # and hand both to the shared request machinery.
    endpoint = self._get_api_url(secure=secure, bounce_id=bounce_id)
    request_headers = self._get_headers(api_key=api_key, test=test,
                                        request_args=request_args)
    return self._request(endpoint, headers=request_headers, **request_args)
constant[Retrieves a single bounce's data.
:param bounce_id: A bounce's ID retrieved with :class:`Bounces`.
:param api_key: Your Postmark API key. Defaults to `self.api_key`.
:param secure: Use the https scheme for Postmark API.
Defaults to `self.secure`.
:param test: Make a test request to the Postmark API.
Defaults to `self.test`.
:param \*\*request_args: Keyword args to pass to
:func:`requests.request`.
:rtype: :class:`BounceResponse`
]
variable[url] assign[=] call[name[self]._get_api_url, parameter[]]
variable[headers] assign[=] call[name[self]._get_headers, parameter[]]
return[call[name[self]._request, parameter[name[url]]]] | keyword[def] identifier[get] ( identifier[self] , identifier[bounce_id] , identifier[api_key] = keyword[None] , identifier[secure] = keyword[None] , identifier[test] = keyword[None] ,
** identifier[request_args] ):
literal[string]
identifier[url] = identifier[self] . identifier[_get_api_url] ( identifier[secure] = identifier[secure] , identifier[bounce_id] = identifier[bounce_id] )
identifier[headers] = identifier[self] . identifier[_get_headers] ( identifier[api_key] = identifier[api_key] , identifier[test] = identifier[test] ,
identifier[request_args] = identifier[request_args] )
keyword[return] identifier[self] . identifier[_request] ( identifier[url] , identifier[headers] = identifier[headers] ,** identifier[request_args] ) | def get(self, bounce_id, api_key=None, secure=None, test=None, **request_args):
"""Retrieves a single bounce's data.
:param bounce_id: A bounce's ID retrieved with :class:`Bounces`.
:param api_key: Your Postmark API key. Defaults to `self.api_key`.
:param secure: Use the https scheme for Postmark API.
Defaults to `self.secure`.
:param test: Make a test request to the Postmark API.
Defaults to `self.test`.
:param \\*\\*request_args: Keyword args to pass to
:func:`requests.request`.
:rtype: :class:`BounceResponse`
"""
url = self._get_api_url(secure=secure, bounce_id=bounce_id)
headers = self._get_headers(api_key=api_key, test=test, request_args=request_args)
return self._request(url, headers=headers, **request_args) |
def histogram(values, bins=10, vrange=None, title="", c="g", corner=1, lines=True):
    """
    Build a 2D histogram from a list of values in n bins.

    Use *vrange* to restrict the range of the histogram.

    Use *corner* to assign its position:
        - 1, topleft,
        - 2, topright,
        - 3, bottomleft,
        - 4, bottomright.

    .. hint:: Example: |fitplanes.py|_
    """
    counts, bin_edges = np.histogram(values, bins=bins, range=vrange)
    # One (bin-center, count) point per bin.
    pts = [[(lo + hi) / 2, n]
           for lo, hi, n in zip(bin_edges[:-1], bin_edges[1:], counts)]
    return xyplot(pts, title, c, corner, lines)
constant[
Build a 2D histogram from a list of values in n bins.
Use *vrange* to restrict the range of the histogram.
Use *corner* to assign its position:
- 1, topleft,
- 2, topright,
- 3, bottomleft,
- 4, bottomright.
.. hint:: Example: |fitplanes.py|_
]
<ast.Tuple object at 0x7da1b06d2fe0> assign[=] call[name[np].histogram, parameter[name[values]]]
variable[pts] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[fs]]]]]] begin[:]
call[name[pts].append, parameter[list[[<ast.BinOp object at 0x7da1b06d3880>, <ast.Subscript object at 0x7da1b06d1330>]]]]
return[call[name[xyplot], parameter[name[pts], name[title], name[c], name[corner], name[lines]]]] | keyword[def] identifier[histogram] ( identifier[values] , identifier[bins] = literal[int] , identifier[vrange] = keyword[None] , identifier[title] = literal[string] , identifier[c] = literal[string] , identifier[corner] = literal[int] , identifier[lines] = keyword[True] ):
literal[string]
identifier[fs] , identifier[edges] = identifier[np] . identifier[histogram] ( identifier[values] , identifier[bins] = identifier[bins] , identifier[range] = identifier[vrange] )
identifier[pts] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[fs] )):
identifier[pts] . identifier[append] ([( identifier[edges] [ identifier[i] ]+ identifier[edges] [ identifier[i] + literal[int] ])/ literal[int] , identifier[fs] [ identifier[i] ]])
keyword[return] identifier[xyplot] ( identifier[pts] , identifier[title] , identifier[c] , identifier[corner] , identifier[lines] ) | def histogram(values, bins=10, vrange=None, title='', c='g', corner=1, lines=True):
"""
Build a 2D histogram from a list of values in n bins.
Use *vrange* to restrict the range of the histogram.
Use *corner* to assign its position:
- 1, topleft,
- 2, topright,
- 3, bottomleft,
- 4, bottomright.
.. hint:: Example: |fitplanes.py|_
"""
(fs, edges) = np.histogram(values, bins=bins, range=vrange)
pts = []
for i in range(len(fs)):
pts.append([(edges[i] + edges[i + 1]) / 2, fs[i]]) # depends on [control=['for'], data=['i']]
return xyplot(pts, title, c, corner, lines) |
def declare_units(out_units, **units_by_name):
    """Create a decorator to check units of function arguments."""
    def dec(func):
        # Map the units given to the decorator onto the decorated
        # function's parameter names.
        sig = signature(func)
        declared = sig.bind_partial(**units_by_name)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Pair each incoming value with its parameter name so the
            # expected units can be checked one by one.
            call_args = sig.bind(*args, **kwargs)
            for param, value in call_args.arguments.items():
                _check_units(value, declared.arguments.get(param))

            result = func(*args, **kwargs)
            if '[' in out_units:
                # Generic spec: default units should have been
                # propagated by the computation; validate them.
                _check_units(result, out_units)
            else:
                # Explicit spec: stamp the declared units on the output.
                result.attrs['units'] = out_units
            return result

        return wrapper
    return dec
return dec | def function[declare_units, parameter[out_units]]:
constant[Create a decorator to check units of function arguments.]
def function[dec, parameter[func]]:
variable[sig] assign[=] call[name[signature], parameter[name[func]]]
variable[bound_units] assign[=] call[name[sig].bind_partial, parameter[]]
def function[wrapper, parameter[]]:
variable[bound_args] assign[=] call[name[sig].bind, parameter[<ast.Starred object at 0x7da2044c0820>]]
for taget[tuple[[<ast.Name object at 0x7da2044c1990>, <ast.Name object at 0x7da2044c1ff0>]]] in starred[call[name[bound_args].arguments.items, parameter[]]] begin[:]
call[name[_check_units], parameter[name[val], call[name[bound_units].arguments.get, parameter[name[name], constant[None]]]]]
variable[out] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da1b23d1a20>]]
if compare[constant[[] in name[out_units]] begin[:]
call[name[_check_units], parameter[name[out], name[out_units]]]
return[name[out]]
return[name[wrapper]]
return[name[dec]] | keyword[def] identifier[declare_units] ( identifier[out_units] ,** identifier[units_by_name] ):
literal[string]
keyword[def] identifier[dec] ( identifier[func] ):
identifier[sig] = identifier[signature] ( identifier[func] )
identifier[bound_units] = identifier[sig] . identifier[bind_partial] (** identifier[units_by_name] )
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[bound_args] = identifier[sig] . identifier[bind] (* identifier[args] ,** identifier[kwargs] )
keyword[for] identifier[name] , identifier[val] keyword[in] identifier[bound_args] . identifier[arguments] . identifier[items] ():
identifier[_check_units] ( identifier[val] , identifier[bound_units] . identifier[arguments] . identifier[get] ( identifier[name] , keyword[None] ))
identifier[out] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[if] literal[string] keyword[in] identifier[out_units] :
identifier[_check_units] ( identifier[out] , identifier[out_units] )
keyword[else] :
identifier[out] . identifier[attrs] [ literal[string] ]= identifier[out_units]
keyword[return] identifier[out]
keyword[return] identifier[wrapper]
keyword[return] identifier[dec] | def declare_units(out_units, **units_by_name):
"""Create a decorator to check units of function arguments."""
def dec(func):
# Match the signature of the function to the arguments given to the decorator
sig = signature(func)
bound_units = sig.bind_partial(**units_by_name)
@functools.wraps(func)
def wrapper(*args, **kwargs):
# Match all passed in value to their proper arguments so we can check units
bound_args = sig.bind(*args, **kwargs)
for (name, val) in bound_args.arguments.items():
_check_units(val, bound_units.arguments.get(name, None)) # depends on [control=['for'], data=[]]
out = func(*args, **kwargs)
# In the generic case, we use the default units that should have been propagated by the computation.
if '[' in out_units:
_check_units(out, out_units) # depends on [control=['if'], data=['out_units']]
else:
# Otherwise, we specify explicitly the units.
out.attrs['units'] = out_units
return out
return wrapper
return dec |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.