idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
46,100
def get_types_by_attr(resource, template_id=None):
    """Using the attributes of *resource*, get all the types that it matches.

    A type matches when every attribute it requires is present on the resource.

    Args:
        resource: an ORM object (network/node/link/group) exposing
            ``attributes`` and ``ref_key``.
        template_id: optional template id used to restrict the candidate types.

    Returns:
        list of matching TemplateType instances.
    """
    # Idiom fix: build the attribute-id set directly instead of an
    # append-loop followed by set().
    all_resource_attr_ids = {res_attr.attr_id for res_attr in resource.attributes}

    all_types = db.DBSession.query(TemplateType).options(
        joinedload_all('typeattrs')).filter(
        TemplateType.resource_type == resource.ref_key)
    if template_id is not None:
        all_types = all_types.filter(TemplateType.template_id == template_id)

    # Keep only the types whose full attribute set is present on the resource.
    return [
        ttype for ttype in all_types.all()
        if {typeattr.attr_id for typeattr in ttype.typeattrs}.issubset(all_resource_attr_ids)
    ]
Using the attributes of the resource get all the types that this resource matches .
46,101
def _get_attr_by_name_and_dimension(name, dimension_id):
    """Search for an attribute with the given name and dimension_id.

    If no such attribute exists, create one (added to the session but not
    flushed) and return it.
    """
    existing = db.DBSession.query(Attr).filter(
        Attr.name == name,
        Attr.dimension_id == dimension_id).first()
    if existing is not None:
        return existing

    # Not found: build a fresh attribute record.
    new_attr = Attr()
    new_attr.dimension_id = dimension_id
    new_attr.name = name
    log.debug("Attribute not found, creating new attribute: name:%s, dimen:%s",
              new_attr.name, new_attr.dimension_id)
    db.DBSession.add(new_attr)
    return new_attr
Search for an attribute with the given name and dimension_id . If such an attribute does not exist create one .
46,102
def get_template_as_xml(template_id, **kwargs):
    """Turn a template into an XML string.

    Builds a <template_definition> document containing the template's name,
    description and one <resource> element per template type.
    """
    template_xml = etree.Element("template_definition")

    # Eager-load the full type -> typeattr -> default dataset -> metadata chain.
    template_i = db.DBSession.query(Template).filter(
        Template.id == template_id).options(
        joinedload('templatetypes').joinedload('typeattrs').joinedload(
            'default_dataset').joinedload('metadata')).one()

    template_name = etree.SubElement(template_xml, "template_name")
    template_name.text = template_i.name
    template_description = etree.SubElement(template_xml, "template_description")
    template_description.text = template_i.description
    resources = etree.SubElement(template_xml, "resources")

    for type_i in template_i.templatetypes:
        xml_resource = etree.SubElement(resources, "resource")

        resource_type = etree.SubElement(xml_resource, "type")
        resource_type.text = type_i.resource_type

        name = etree.SubElement(xml_resource, "name")
        name.text = type_i.name

        description = etree.SubElement(xml_resource, "description")
        description.text = type_i.description

        alias = etree.SubElement(xml_resource, "alias")
        alias.text = type_i.alias

        # Layout is stored as a string; convert it into an etree fragment.
        if type_i.layout is not None and type_i.layout != "":
            layout = _get_layout_as_etree(type_i.layout)
            xml_resource.append(layout)

        # One <attribute> element per typeattr of this type.
        for type_attr in type_i.typeattrs:
            attr = _make_attr_element_from_typeattr(xml_resource, type_attr)

        resources.append(xml_resource)

    xml_string = etree.tostring(template_xml, encoding="unicode")

    return xml_string
Turn a template into an xml template
46,103
def import_template_json(template_json_string, allow_update=True, **kwargs):
    """Add the template, types and typeattrs described in a JSON string.

    Args:
        template_json_string: JSON document describing the template.
        allow_update: whether existing template entities may be updated.

    Raises:
        HydraError: if the string cannot be parsed as JSON.
    """
    user_id = kwargs.get('user_id')
    try:
        template_dict = json.loads(template_json_string)
    except ValueError:
        # BUG FIX: was a bare `except:` (swallows KeyboardInterrupt etc.);
        # json.loads raises ValueError (JSONDecodeError subclasses it).
        # Also fixes the "Plese" typo in the user-facing message.
        raise HydraError("Unable to parse JSON string. Please ensure it is JSON compatible.")

    return import_template_dict(template_dict, allow_update=allow_update, user_id=user_id)
Add the template, its types and typeattrs described in a JSON string.
46,104
def set_network_template(template_id, network_id, **kwargs):
    """Apply an existing template to a network.

    Used when a template has changed and additional attributes must be added
    to the network's elements: the existing resource types (for this template)
    on the network, its nodes, links and groups are gathered and re-assigned
    so that any new typeattrs are propagated.
    """
    resource_types = []

    # The network's own type for this template, if it has one.
    try:
        network_type = db.DBSession.query(ResourceType).filter(
            ResourceType.ref_key == 'NETWORK',
            ResourceType.network_id == network_id,
            ResourceType.type_id == TemplateType.type_id,
            TemplateType.template_id == template_id).one()
        resource_types.append(network_type)
    except NoResultFound:
        log.debug("No network type to set.")
        pass

    # Types of every node in the network belonging to this template.
    node_types = db.DBSession.query(ResourceType).filter(
        ResourceType.ref_key == 'NODE',
        ResourceType.node_id == Node.node_id,
        Node.network_id == network_id,
        ResourceType.type_id == TemplateType.type_id,
        TemplateType.template_id == template_id).all()

    # Same for links.
    link_types = db.DBSession.query(ResourceType).filter(
        ResourceType.ref_key == 'LINK',
        ResourceType.link_id == Link.link_id,
        Link.network_id == network_id,
        ResourceType.type_id == TemplateType.type_id,
        TemplateType.template_id == template_id).all()

    # Same for resource groups.
    group_types = db.DBSession.query(ResourceType).filter(
        ResourceType.ref_key == 'GROUP',
        ResourceType.group_id == ResourceGroup.group_id,
        ResourceGroup.network_id == network_id,
        ResourceType.type_id == TemplateType.type_id,
        TemplateType.template_id == template_id).all()

    resource_types.extend(node_types)
    resource_types.extend(link_types)
    resource_types.extend(group_types)

    # Re-assigning adds any attributes newly required by the (updated) types.
    assign_types_to_resources(resource_types)

    log.debug("Finished setting network template")
Apply an existing template to a network. Used when a template has changed and additional attributes must be added to the network's elements.
46,105
def remove_template_from_network(network_id, template_id, remove_attrs, **kwargs):
    """Remove all resource types in a network relating to the specified template.

    Args:
        network_id: id of the network to clean.
        template_id: id of the template whose types are to be removed.
        remove_attrs: 'Y' to also delete resource attributes belonging to the
            template's types; they are only deleted when not shared with
            another template on the network.

    Raises:
        HydraError: if the network or template does not exist.
    """
    try:
        network = db.DBSession.query(Network).filter(
            Network.id == network_id).one()
    except NoResultFound:
        raise HydraError("Network %s not found" % network_id)

    try:
        template = db.DBSession.query(Template).filter(
            Template.id == template_id).one()
    except NoResultFound:
        raise HydraError("Template %s not found" % template_id)

    type_ids = [tmpltype.id for tmpltype in template.templatetypes]
    node_ids = [n.id for n in network.nodes]
    link_ids = [l.id for l in network.links]
    group_ids = [g.id for g in network.resourcegroups]

    if remove_attrs == 'Y':
        # Collect attributes that are safe to delete (i.e. not also required
        # by a type from a different template) for the network itself and
        # every one of its elements.
        resource_attrs_to_remove = _get_resources_to_remove(network, template)
        for n in network.nodes:
            resource_attrs_to_remove.extend(_get_resources_to_remove(n, template))
        for l in network.links:
            resource_attrs_to_remove.extend(_get_resources_to_remove(l, template))
        for g in network.resourcegroups:
            resource_attrs_to_remove.extend(_get_resources_to_remove(g, template))

        for ra in resource_attrs_to_remove:
            db.DBSession.delete(ra)

    # Delete the ResourceType rows linking this template's types to the
    # network and to its nodes, links and groups.
    resource_types = db.DBSession.query(ResourceType).filter(
        and_(or_(
            ResourceType.network_id == network_id,
            ResourceType.node_id.in_(node_ids),
            ResourceType.link_id.in_(link_ids),
            ResourceType.group_id.in_(group_ids),
        ), ResourceType.type_id.in_(type_ids))).all()

    for resource_type in resource_types:
        db.DBSession.delete(resource_type)

    db.DBSession.flush()
Remove all resource types in a network relating to the specified template . remove_attrs Flag to indicate whether the attributes associated with the template types should be removed from the resources in the network . These will only be removed if they are not shared with another template on the network
46,106
def _get_resources_to_remove ( resource , template ) : type_ids = [ tmpltype . id for tmpltype in template . templatetypes ] node_attr_ids = dict ( [ ( ra . attr_id , ra ) for ra in resource . attributes ] ) attrs_to_remove = [ ] attrs_to_keep = [ ] for nt in resource . types : if nt . templatetype . id in type_ids : for ta in nt . templatetype . typeattrs : if node_attr_ids . get ( ta . attr_id ) : attrs_to_remove . append ( node_attr_ids [ ta . attr_id ] ) else : for ta in nt . templatetype . typeattrs : if node_attr_ids . get ( ta . attr_id ) : attrs_to_keep . append ( node_attr_ids [ ta . attr_id ] ) final_attrs_to_remove = set ( attrs_to_remove ) - set ( attrs_to_keep ) return list ( final_attrs_to_remove )
Given a resource and a template being removed identify the resource attribtes which can be removed .
46,107
def get_matching_resource_types(resource_type, resource_id, **kwargs):
    """Get the possible types of a resource by checking its attributes
    against all available types."""
    # Dispatch table replaces the if/elif chain over resource kinds.
    model_map = {
        'NETWORK': (Network, Network.id),
        'NODE': (Node, Node.id),
        'LINK': (Link, Link.id),
        'GROUP': (ResourceGroup, ResourceGroup.id),
    }
    resource_i = None
    if resource_type in model_map:
        model, id_column = model_map[resource_type]
        resource_i = db.DBSession.query(model).filter(
            id_column == resource_id).one()

    return get_types_by_attr(resource_i)
Get the possible types of a resource by checking its attributes against all available types .
46,108
def check_type_compatibility(type_1_id, type_2_id):
    """Check whether two types that share attributes specify consistent units.

    When applying a type to a resource the resource may already have an
    attribute from another template with a different unit; this reports those
    conflicts.

    Returns:
        list of human-readable error strings, one per conflicting attribute;
        an empty list means the types are compatible.
    """
    errors = []

    type_1 = db.DBSession.query(TemplateType).filter(
        TemplateType.id == type_1_id).options(joinedload_all('typeattrs')).one()
    type_2 = db.DBSession.query(TemplateType).filter(
        TemplateType.id == type_2_id).options(joinedload_all('typeattrs')).one()
    template_1_name = type_1.template.name
    template_2_name = type_2.template.name

    type_1_attrs = set([t.attr_id for t in type_1.typeattrs])
    type_2_attrs = set([t.attr_id for t in type_2.typeattrs])

    shared_attrs = type_1_attrs.intersection(type_2_attrs)
    if len(shared_attrs) == 0:
        return []

    type_1_dict = {}
    for t in type_1.typeattrs:
        if t.attr_id in shared_attrs:
            type_1_dict[t.attr_id] = t

    for ta in type_2.typeattrs:
        # BUG FIX: type_1_dict only holds SHARED attributes, but this loop
        # visits all of type_2's typeattrs; looking up a non-shared attr_id
        # raised a KeyError. Skip attributes that type_1 does not have.
        if ta.attr_id not in shared_attrs:
            continue

        type_2_unit_id = ta.unit_id
        type_1_unit_id = type_1_dict[ta.attr_id].unit_id

        fmt_dict = {
            'template_1_name': template_1_name,
            'template_2_name': template_2_name,
            'attr_name': ta.attr.name,
            'type_1_unit_id': type_1_unit_id,
            'type_2_unit_id': type_2_unit_id,
            'type_name': type_1.name
        }

        if type_1_unit_id is None and type_2_unit_id is not None:
            errors.append("Type %(type_name)s in template %(template_1_name)s"
                          " stores %(attr_name)s with no units, while template"
                          "%(template_2_name)s stores it with unit %(type_2_unit_id)s" % fmt_dict)
        elif type_1_unit_id is not None and type_2_unit_id is None:
            errors.append("Type %(type_name)s in template %(template_1_name)s"
                          " stores %(attr_name)s in %(type_1_unit_id)s."
                          " Template %(template_2_name)s stores it with no unit." % fmt_dict)
        elif type_1_unit_id != type_2_unit_id:
            errors.append("Type %(type_name)s in template %(template_1_name)s"
                          " stores %(attr_name)s in %(type_1_unit_id)s, while"
                          " template %(template_2_name)s stores it in %(type_2_unit_id)s" % fmt_dict)

    return errors
When applying a type to a resource it may be the case that the resource already has an attribute specified in the new type but the template which defines this pre - existing attribute has a different unit specification to the new template .
46,109
def assign_type_to_resource(type_id, resource_type, resource_id, **kwargs):
    """Assign a new type to a resource, adding any missing attributes.

    Missing attributes are also added when the type is already assigned, so
    this can be used to update resources after a resource type has changed.

    Raises:
        HydraError: if the type's resource kind does not match *resource_type*.
    """
    if resource_type == 'NETWORK':
        resource = db.DBSession.query(Network).filter(
            Network.id == resource_id).one()
    elif resource_type == 'NODE':
        resource = db.DBSession.query(Node).filter(
            Node.id == resource_id).one()
    elif resource_type == 'LINK':
        resource = db.DBSession.query(Link).filter(
            Link.id == resource_id).one()
    elif resource_type == 'GROUP':
        resource = db.DBSession.query(ResourceGroup).filter(
            ResourceGroup.id == resource_id).one()

    type_i = db.DBSession.query(TemplateType).filter(
        TemplateType.id == type_id).one()

    # FIX: validate the type/resource match BEFORE computing the inserts.
    # Previously set_resource_type ran first and its work was discarded
    # whenever this check raised.
    if resource_type != type_i.resource_type:
        raise HydraError("Cannot assign a %s type to a %s" %
                         (type_i.resource_type, resource_type))

    res_attrs, res_type, res_scenarios = set_resource_type(resource, type_id, **kwargs)

    if res_type is not None:
        db.DBSession.bulk_insert_mappings(ResourceType, [res_type])
    if len(res_attrs) > 0:
        db.DBSession.bulk_insert_mappings(ResourceAttr, res_attrs)
    if len(res_scenarios) > 0:
        db.DBSession.bulk_insert_mappings(ResourceScenario, res_scenarios)

    # NOTE(review): Attr.id == None should match no rows for an autoincrement
    # primary key — presumably defensive cleanup; confirm intent.
    db.DBSession.query(Attr).filter(Attr.id == None).delete()
    db.DBSession.flush()

    return db.DBSession.query(TemplateType).filter(
        TemplateType.id == type_id).one()
Assign new type to a resource . This function checks if the necessary attributes are present and adds them if needed . Non existing attributes are also added when the type is already assigned . This means that this function can also be used to update resources when a resource type has changed .
46,110
def remove_type_from_resource(type_id, resource_type, resource_id, **kwargs):
    """Remove a resource type from a resource."""
    # Exactly one of the three reference ids is set, per the resource kind.
    node_id = link_id = group_id = None
    if resource_type == 'NODE':
        node_id = resource_id
    elif resource_type == 'LINK':
        link_id = resource_id
    elif resource_type == 'GROUP':
        group_id = resource_id

    resourcetype = db.DBSession.query(ResourceType).filter(
        ResourceType.type_id == type_id,
        ResourceType.ref_key == resource_type,
        ResourceType.node_id == node_id,
        ResourceType.link_id == link_id,
        ResourceType.group_id == group_id).one()

    db.DBSession.delete(resourcetype)
    db.DBSession.flush()
    return 'OK'
Remove a resource type from a resource.
46,111
def add_template(template, **kwargs):
    """Add a template together with its types and typeattrs."""
    tmpl = Template()
    tmpl.name = template.name
    if template.description:
        tmpl.description = template.description
    if template.layout:
        tmpl.layout = get_layout_as_string(template.layout)

    db.DBSession.add(tmpl)

    # Create/attach each incoming type (and its typeattrs) to the new template.
    incoming_types = template.templatetypes
    if incoming_types is not None:
        for incoming_type in incoming_types:
            tmpl.templatetypes.append(_update_templatetype(incoming_type))

    db.DBSession.flush()
    return tmpl
Add template and a type and typeattrs .
46,112
def update_template(template, **kwargs):
    """Update a template and its types and typeattrs.

    Types present in the incoming *template* are updated or created; types on
    the stored template that are absent from the incoming one are deleted.
    """
    tmpl = db.DBSession.query(Template).filter(Template.id == template.id).one()
    tmpl.name = template.name
    if template.description:
        tmpl.description = template.description

    # Touch each typeattr's attr so the relationship is loaded before changes.
    for tt in tmpl.templatetypes:
        for ta in tt.typeattrs:
            ta.attr

    if template.layout:
        tmpl.layout = get_layout_as_string(template.layout)

    type_dict = dict([(t.id, t) for t in tmpl.templatetypes])
    existing_templatetypes = []

    # The incoming object may carry its types under `types` or `templatetypes`.
    if template.types is not None or template.templatetypes is not None:
        types = template.types if template.types is not None else template.templatetypes
        for templatetype in types:
            if templatetype.id is not None:
                # Known type: update in place.
                type_i = type_dict[templatetype.id]
                _update_templatetype(templatetype, type_i)
                existing_templatetypes.append(type_i.id)
            else:
                # New type: create and attach to this template.
                templatetype.template_id = template.id
                new_templatetype_i = _update_templatetype(templatetype)
                existing_templatetypes.append(new_templatetype_i.id)

    # Remove stored types not present in the incoming template.
    for tt in tmpl.templatetypes:
        if tt.id not in existing_templatetypes:
            delete_templatetype(tt.id)

    db.DBSession.flush()

    return tmpl
Update template and a type and typeattrs .
46,113
def delete_template(template_id, **kwargs):
    """Delete a template along with its types and typeattrs."""
    template_qry = db.DBSession.query(Template).filter(Template.id == template_id)
    try:
        tmpl = template_qry.one()
    except NoResultFound:
        raise ResourceNotFoundError("Template %s not found" % (template_id,))
    db.DBSession.delete(tmpl)
    db.DBSession.flush()
    return 'OK'
Delete a template and its type and typeattrs .
46,114
def get_template(template_id, **kwargs):
    """Get a specific resource template by ID, with its types, typeattrs and
    default datasets eagerly loaded.

    Raises:
        HydraError: if no template with *template_id* exists.
    """
    try:
        tmpl_i = db.DBSession.query(Template).filter(
            Template.id == template_id).options(
            joinedload_all('templatetypes.typeattrs.default_dataset.metadata')).one()
    except NoResultFound:
        raise HydraError("Template %s not found" % template_id)

    # Touch each typeattr's attr so it is loaded before the session closes.
    for tmpltype_i in tmpl_i.templatetypes:
        for typeattr_i in tmpltype_i.typeattrs:
            typeattr_i.attr

    return tmpl_i
Get a specific resource template by ID.
46,115
def get_template_by_name(name, **kwargs):
    """Get a specific resource template by name.

    Raises:
        HydraError: if no template with that name exists.
    """
    try:
        return db.DBSession.query(Template).filter(
            Template.name == name).options(
            joinedload_all('templatetypes.typeattrs.default_dataset.metadata')).one()
    except NoResultFound:
        log.info("%s is not a valid identifier for a template", name)
        raise HydraError('Template "%s" not found' % name)
Get a specific resource template by name .
46,116
def add_templatetype(templatetype, **kwargs):
    """Add a template type along with its typeattrs."""
    new_type = _update_templatetype(templatetype)
    db.DBSession.flush()
    return new_type
Add a template type with typeattrs .
46,117
def update_templatetype(templatetype, **kwargs):
    """Update a resource type and its typeattrs.

    New typeattrs are added; typeattrs not sent are ignored. To delete a
    typeattr, call delete_typeattr instead.
    """
    stored_type = db.DBSession.query(TemplateType).filter(
        TemplateType.id == templatetype.id).one()
    _update_templatetype(templatetype, stored_type)
    db.DBSession.flush()
    return stored_type
Update a resource type and its typeattrs . New typeattrs will be added . typeattrs not sent will be ignored . To delete typeattrs call delete_typeattr
46,118
def _set_typeattr(typeattr, existing_ta=None):
    """Add or update a type attribute.

    If *existing_ta* is provided it is updated in place; otherwise a new
    TypeAttr is created (and added to the session).

    Raises:
        HydraError: if a dimension is requested that conflicts with the
            dimension of the underlying attribute.
    """
    if existing_ta is None:
        ta = TypeAttr(attr_id=typeattr.attr_id)
    else:
        ta = existing_ta

    ta.unit_id = typeattr.unit_id
    ta.type_id = typeattr.type_id
    ta.data_type = typeattr.data_type

    if hasattr(typeattr, 'default_dataset_id') and typeattr.default_dataset_id is not None:
        ta.default_dataset_id = typeattr.default_dataset_id

    ta.description = typeattr.description
    ta.properties = typeattr.get_properties()
    # 'N' is the stored default when the caller does not specify is_var.
    ta.attr_is_var = typeattr.is_var if typeattr.is_var is not None else 'N'
    ta.data_restriction = _parse_data_restriction(typeattr.data_restriction)

    if typeattr.dimension_id is not None:
        if typeattr.attr_id is not None and typeattr.attr_id > 0:
            attr = ta.attr
            # BUG FIX: the original condition (`... != ... or attr is not None
            # and attr.dimension_id is not None`) reduced to "attr has any
            # dimension", raising even when the dimensions matched. Only a
            # genuine mismatch is an error.
            if attr is not None and attr.dimension_id is not None \
                    and attr.dimension_id != typeattr.dimension_id:
                raise HydraError("Cannot set a dimension on type attribute which "
                                 "does not match its attribute. Create a new attribute if "
                                 "you want to use attribute %s with dimension_id %s" %
                                 (attr.name, typeattr.dimension_id))
        elif typeattr.attr_id is None and typeattr.name is not None:
            # No attr id supplied: resolve (or create) the attribute by
            # name + dimension.
            attr = _get_attr_by_name_and_dimension(typeattr.name, typeattr.dimension_id)
            ta.attr_id = attr.id
            ta.attr = attr

    _check_dimension(ta)

    if existing_ta is None:
        log.debug("Adding ta to DB")
        db.DBSession.add(ta)

    return ta
Add or update a type attribute. If an existing type attribute is provided, then update it.
46,119
def _update_templatetype(templatetype, existing_tt=None):
    """Add or update a templatetype.

    If *existing_tt* is passed in, update it. Otherwise search for an
    existing one by id; if no id is given, create a new TemplateType.
    """
    if existing_tt is None:
        if "id" in templatetype and templatetype.id is not None:
            tmpltype_i = db.DBSession.query(TemplateType).filter(
                TemplateType.id == templatetype.id).one()
        else:
            tmpltype_i = TemplateType()
    else:
        tmpltype_i = existing_tt

    tmpltype_i.template_id = templatetype.template_id
    tmpltype_i.name = templatetype.name
    tmpltype_i.description = templatetype.description
    tmpltype_i.alias = templatetype.alias

    if templatetype.layout is not None:
        tmpltype_i.layout = get_layout_as_string(templatetype.layout)

    tmpltype_i.resource_type = templatetype.resource_type

    # Index the stored typeattrs by attr_id so incoming ones can be matched.
    ta_dict = {}
    for t in tmpltype_i.typeattrs:
        ta_dict[t.attr_id] = t

    existing_attrs = []
    if templatetype.typeattrs is not None:
        for typeattr in templatetype.typeattrs:
            if typeattr.attr_id in ta_dict:
                # Update the matching stored typeattr.
                ta = _set_typeattr(typeattr, ta_dict[typeattr.attr_id])
                existing_attrs.append(ta.attr_id)
            else:
                # New typeattr: create and attach to this type.
                ta = _set_typeattr(typeattr)
                tmpltype_i.typeattrs.append(ta)
                existing_attrs.append(ta.attr_id)

    # Stored typeattrs absent from the incoming type are deleted.
    log.debug("Deleting any type attrs not sent")
    for ta in ta_dict.values():
        if ta.attr_id not in existing_attrs:
            delete_typeattr(ta)

    if existing_tt is None:
        db.DBSession.add(tmpltype_i)

    return tmpltype_i
Add or update a templatetype . If an existing template type is passed in update that one . Otherwise search for an existing one . If not found add .
46,120
def delete_templatetype(type_id, template_i=None, **kwargs):
    """Delete a template type and its typeattrs.

    Raises:
        ResourceNotFoundError: if the type does not exist.
    """
    type_qry = db.DBSession.query(TemplateType).filter(TemplateType.id == type_id)
    try:
        tmpltype_i = type_qry.one()
    except NoResultFound:
        raise ResourceNotFoundError("Template Type %s not found" % (type_id,))

    # Load the parent template unless the caller already has it.
    if template_i is None:
        template_i = db.DBSession.query(Template).filter(
            Template.id == tmpltype_i.template_id).one()

    template_i.templatetypes.remove(tmpltype_i)
    db.DBSession.delete(tmpltype_i)
    db.DBSession.flush()
Delete a template type and its typeattrs .
46,121
def get_templatetype(type_id, **kwargs):
    """Get a specific resource type by ID, with its typeattrs eagerly loaded."""
    type_qry = db.DBSession.query(TemplateType).filter(TemplateType.id == type_id)
    return type_qry.options(joinedload_all("typeattrs")).one()
Get a specific resource type by ID .
46,122
def get_templatetype_by_name(template_id, type_name, **kwargs):
    """Get a specific resource type by name within the given template.

    Raises:
        HydraError: if no type with that name exists in the template.
    """
    try:
        # BUG FIX: the original filtered on TemplateType.id == template_id,
        # comparing a *type* primary key against a *template* id, so lookups
        # could never match the intended (template, name) pair. The correct
        # column is TemplateType.template_id.
        templatetype = db.DBSession.query(TemplateType).filter(
            TemplateType.template_id == template_id,
            TemplateType.name == type_name).one()
    except NoResultFound:
        raise HydraError("%s is not a valid identifier for a type" % (type_name))

    return templatetype
Get a specific resource type by name .
46,123
def add_typeattr(typeattr, **kwargs):
    """Add a typeattr to an existing type."""
    parent_type = get_templatetype(typeattr.type_id, user_id=kwargs.get('user_id'))
    new_ta = _set_typeattr(typeattr)
    parent_type.typeattrs.append(new_ta)
    db.DBSession.flush()
    return new_ta
Add a typeattr to an existing type.
46,124
def delete_typeattr(typeattr, **kwargs):
    """Remove a typeattr from an existing type."""
    parent_type = get_templatetype(typeattr.type_id, user_id=kwargs.get('user_id'))
    ta_i = db.DBSession.query(TypeAttr).filter(
        TypeAttr.type_id == typeattr.type_id,
        TypeAttr.attr_id == typeattr.attr_id).one()
    parent_type.typeattrs.remove(ta_i)
    db.DBSession.flush()
    return 'OK'
Remove a typeattr from an existing type.
46,125
def validate_attr(resource_attr_id, scenario_id, template_id=None):
    """Check that a resource attribute satisfies the requirements of all the
    types of its resource.

    Returns:
        A JSONObject describing the failure, or None if validation passes.
    """
    rs = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.resource_attr_id == resource_attr_id,
        ResourceScenario.scenario_id == scenario_id).options(
        joinedload_all("resourceattr")).options(
        joinedload_all("dataset")).one()

    try:
        _do_validate_resourcescenario(rs, template_id)
        return None
    except HydraError as e:
        # Package everything a caller needs to locate the offending value.
        return JSONObject(dict(
            ref_key=rs.resourceattr.ref_key,
            ref_id=rs.resourceattr.get_resource_id(),
            ref_name=rs.resourceattr.get_resource().get_name(),
            resource_attr_id=rs.resource_attr_id,
            attr_id=rs.resourceattr.attr.id,
            attr_name=rs.resourceattr.attr.name,
            dataset_id=rs.dataset_id,
            scenario_id=scenario_id,
            template_id=template_id,
            error_text=e.args[0]))
Check that a resource attribute satisfies the requirements of all the types of the resource .
46,126
def validate_attrs(resource_attr_ids, scenario_id, template_id=None):
    """Check that multiple resource attributes satisfy the requirements of
    the types of the resources they are attached to.

    Returns:
        list of error dicts; an empty list means everything validated.
    """
    rs_query = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.resource_attr_id.in_(resource_attr_ids),
        ResourceScenario.scenario_id == scenario_id).options(
        joinedload_all("resourceattr")).options(
        joinedload_all("dataset"))

    errors = []
    for rs in rs_query.all():
        try:
            _do_validate_resourcescenario(rs, template_id)
        except HydraError as e:
            errors.append(dict(
                ref_key=rs.resourceattr.ref_key,
                ref_id=rs.resourceattr.get_resource_id(),
                ref_name=rs.resourceattr.get_resource().get_name(),
                resource_attr_id=rs.resource_attr_id,
                attr_id=rs.resourceattr.attr.id,
                attr_name=rs.resourceattr.attr.name,
                dataset_id=rs.dataset_id,
                scenario_id=scenario_id,
                template_id=template_id,
                error_text=e.args[0]))

    return errors
Check that multiple resource attributes satisfy the requirements of the types of the resources to which they are attached.
46,127
def validate_network(network_id, template_id, scenario_id=None):
    """Validate a network against a template.

    Ensure all nodes, links and groups in the network have the resource
    attributes required by the template's types. Validation does not fail if
    a resource has MORE than the required attributes; it fails if it has
    fewer, or if any attribute has a conflicting dimension or unit.

    Returns:
        list of error strings collected from every resource.
    """
    network = db.DBSession.query(Network).filter(
        Network.id == network_id).options(noload('scenarios')).first()
    if network is None:
        raise HydraError("Could not find network %s" % (network_id))

    # Optionally index the scenario's data by resource_attr_id so values can
    # be checked against any data restrictions.
    resource_scenario_dict = {}
    if scenario_id is not None:
        scenario = db.DBSession.query(Scenario).filter(
            Scenario.id == scenario_id).first()
        if scenario is None:
            raise HydraError("Could not find scenario %s" % (scenario_id,))
        for rs in scenario.resourcescenarios:
            resource_scenario_dict[rs.resource_attr_id] = rs

    template = db.DBSession.query(Template).filter(
        Template.id == template_id).options(
        joinedload_all('templatetypes')).first()
    if template is None:
        raise HydraError("Could not find template %s" % (template_id,))

    # Group the template's types by the kind of resource they apply to.
    resource_type_defs = {
        'NETWORK': {},
        'NODE': {},
        'LINK': {},
        'GROUP': {},
    }
    for tt in template.templatetypes:
        resource_type_defs[tt.resource_type][tt.id] = tt

    errors = []
    if resource_type_defs.get('NETWORK'):
        net_types = resource_type_defs['NETWORK']
        errors.extend(_validate_resource(network, net_types, resource_scenario_dict))
    if resource_type_defs.get('NODE'):
        node_types = resource_type_defs['NODE']
        for node in network.nodes:
            errors.extend(_validate_resource(node, node_types, resource_scenario_dict))
    if resource_type_defs.get('LINK'):
        link_types = resource_type_defs['LINK']
        for link in network.links:
            errors.extend(_validate_resource(link, link_types, resource_scenario_dict))
    if resource_type_defs.get('GROUP'):
        group_types = resource_type_defs['GROUP']
        for group in network.resourcegroups:
            errors.extend(_validate_resource(group, group_types, resource_scenario_dict))

    return errors
Given a network scenario and template ensure that all the nodes links & groups in the network have the correct resource attributes as defined by the types in the template . Also ensure valid entries in tresourcetype . This validation will not fail if a resource has more than the required type but will fail if it has fewer or if any attribute has a conflicting dimension or unit .
46,128
def _make_attr_element_from_typeattr(parent, type_attr_i):
    """Append an <attribute> element describing a typeattr to *parent*.

    Returns the created element.
    """
    attr = _make_attr_element(parent, type_attr_i.attr)

    if type_attr_i.unit_id is not None:
        unit_el = etree.SubElement(attr, 'unit')
        unit_el.text = units.get_unit(type_attr_i.unit_id).abbreviation

    is_var_el = etree.SubElement(attr, 'is_var')
    is_var_el.text = type_attr_i.attr_is_var

    if type_attr_i.data_type is not None:
        data_type_el = etree.SubElement(attr, 'data_type')
        data_type_el.text = type_attr_i.data_type

    if type_attr_i.data_restriction is not None:
        restriction_el = etree.SubElement(attr, 'restrictions')
        restriction_el.text = type_attr_i.data_restriction

    return attr
General function to add an attribute element to a resource element. resource_attr_i can also be a type_attr if being called from get_template_as_xml.
46,129
def _make_attr_element_from_resourceattr(parent, resource_attr_i):
    """Append an <attribute> element describing a resource attribute to
    *parent* and return it."""
    attr = _make_attr_element(parent, resource_attr_i.attr)
    is_var_el = etree.SubElement(attr, 'is_var')
    is_var_el.text = resource_attr_i.attr_is_var
    return attr
General function to add an attribute element to a resource element .
46,130
def _make_attr_element(parent, attr_i):
    """Create an <attribute> element (name/description/dimension) under
    *parent* from an attribute DB object."""
    attr = etree.SubElement(parent, "attribute")

    name_el = etree.SubElement(attr, 'name')
    name_el.text = attr_i.name

    desc_el = etree.SubElement(attr, 'description')
    desc_el.text = attr_i.description

    # A None dimension_id is accepted and yields the "empty" dimension's name.
    dimension_name = units.get_dimension(
        attr_i.dimension_id, do_accept_dimension_id_none=True).name
    dim_el = etree.SubElement(attr, 'dimension')
    dim_el.text = dimension_name

    return attr
create an attribute element from an attribute DB object
46,131
def valueFromDataset(cls, datatype, value, metadata=None, tmap=None):
    """Return the value contained by the dataset argument, after casting to
    the correct type and performing type-specific validation."""
    mapping = typemap if tmap is None else tmap
    instance = cls.fromDataset(datatype, value, metadata=metadata, tmap=mapping)
    return instance.value
Return the value contained by dataset argument after casting to correct type and performing type - specific validation
46,132
def fromDataset(datatype, value, metadata=None, tmap=None):
    """Return a representation of the dataset argument as an instance of the
    type registered for *datatype*."""
    mapping = typemap if tmap is None else tmap
    datatype_cls = mapping[datatype.upper()]
    return datatype_cls.fromDataset(value, metadata=metadata)
Return a representation of dataset argument as an instance
46,133
def exists_dimension(dimension_name, **kwargs):
    """Return True if a dimension with the given name exists, False otherwise."""
    try:
        db.DBSession.query(Dimension).filter(
            Dimension.name == dimension_name).one()
        return True
    except NoResultFound:
        # BUG FIX: was `raise False`, which is a TypeError at runtime
        # (exceptions must derive from BaseException); the function is
        # documented to RETURN False here.
        return False
Given a dimension name, returns True if it exists, False otherwise.
46,134
def convert_units(values, source_measure_or_unit_abbreviation,
                  target_measure_or_unit_abbreviation, **kwargs):
    """Convert a value (or list of values) from one unit to another.

    A scalar input is wrapped in a list first, so the result is always a list.
    """
    if numpy.isscalar(values):
        values = [values]
    as_floats = [float(value) for value in values]
    return convert(as_floats,
                   source_measure_or_unit_abbreviation,
                   target_measure_or_unit_abbreviation)
Convert a value from one unit to another one .
46,135
def convert(values, source_measure_or_unit_abbreviation, target_measure_or_unit_abbreviation):
    """Convert a float, or a list of floats, from one unit to another.

    The two units must represent the same physical dimension.

    Raises:
        HydraError: if the units belong to different dimensions, or if
            *values* is neither a float nor a list.
    """
    source_dimension = get_dimension_by_unit_measure_or_abbreviation(
        source_measure_or_unit_abbreviation)
    target_dimension = get_dimension_by_unit_measure_or_abbreviation(
        target_measure_or_unit_abbreviation)

    # BUG FIX: in the original flattened form a dimension mismatch fell
    # through and silently returned None; the "not consistent" error clearly
    # belongs to this check, so raise it up front.
    if source_dimension != target_dimension:
        raise HydraError("Unit conversion: dimensions are not consistent.")

    source = JSONObject({})
    target = JSONObject({})
    source.unit_abbreviation, source.factor = _parse_unit(source_measure_or_unit_abbreviation)
    target.unit_abbreviation, target.factor = _parse_unit(target_measure_or_unit_abbreviation)
    source.unit_data = get_unit_by_abbreviation(source.unit_abbreviation)
    target.unit_data = get_unit_by_abbreviation(target.unit_abbreviation)
    source.conv_factor = JSONObject({'lf': source.unit_data.lf, 'cf': source.unit_data.cf})
    target.conv_factor = JSONObject({'lf': target.unit_data.lf, 'cf': target.unit_data.cf})

    def _convert_one(value):
        # Linear unit conversion:
        # y = (lf_s/lf_t * (f_s * x) + (cf_s - cf_t)/lf_t) / f_t
        return (source.conv_factor.lf / target.conv_factor.lf * (source.factor * value)
                + (source.conv_factor.cf - target.conv_factor.cf) / target.conv_factor.lf
                ) / target.factor

    if isinstance(values, float):
        return _convert_one(values)
    elif isinstance(values, list):
        return [_convert_one(value) for value in values]
    else:
        raise HydraError("Unit conversion: unsupported value type %s" % type(values))
Convert a value, or a list of values, from one unit to another. The two units must represent the same physical dimension.
46,136
def get_empty_dimension(**kwargs):
    """Return a dimension JSONObject initialised with empty values."""
    empty = JSONObject(Dimension())
    empty.id = None
    empty.name = ''
    empty.description = ''
    empty.project_id = None
    empty.units = []
    return empty
Returns a dimension object initialized with empty values
46,137
def get_dimension(dimension_id, do_accept_dimension_id_none=False, **kwargs):
    """Given a dimension id, return all its data, including its units.

    If *do_accept_dimension_id_none* is set and *dimension_id* is None, an
    empty dimension object is returned instead of raising.

    Raises:
        ResourceNotFoundError: if the dimension does not exist.
    """
    # Idiom fix: test the flag by truthiness rather than `== True`.
    if do_accept_dimension_id_none and dimension_id is None:
        return get_empty_dimension()

    try:
        dimension = db.DBSession.query(Dimension).filter(
            Dimension.id == dimension_id).one()
        dimension.units  # touch to eagerly load the units relationship
        return JSONObject(dimension)
    except NoResultFound:
        raise ResourceNotFoundError("Dimension %s not found" % (dimension_id))
Given a dimension id returns all its data
46,138
def get_dimensions ( ** kwargs ) : dimensions_list = db . DBSession . query ( Dimension ) . options ( load_only ( "id" ) ) . all ( ) return_list = [ ] for dimension in dimensions_list : return_list . append ( get_dimension ( dimension . id ) ) return return_list
Returns a list of objects describing all the dimensions with all the units .
46,139
def get_dimension_by_name ( dimension_name , ** kwargs ) : try : if dimension_name is None : dimension_name = '' dimension = db . DBSession . query ( Dimension ) . filter ( func . lower ( Dimension . name ) == func . lower ( dimension_name . strip ( ) ) ) . one ( ) return get_dimension ( dimension . id ) except NoResultFound : raise ResourceNotFoundError ( "Dimension %s not found" % ( dimension_name ) )
Given a dimension name returns all its data . Used in convert functions
46,140
def get_unit ( unit_id , ** kwargs ) : try : unit = db . DBSession . query ( Unit ) . filter ( Unit . id == unit_id ) . one ( ) return JSONObject ( unit ) except NoResultFound : raise ResourceNotFoundError ( "Unit %s not found" % ( unit_id ) )
Returns a single unit
46,141
def get_units ( ** kwargs ) : units_list = db . DBSession . query ( Unit ) . all ( ) units = [ ] for unit in units_list : new_unit = JSONObject ( unit ) units . append ( new_unit ) return units
Returns all the units
46,142
def get_dimension_by_unit_measure_or_abbreviation ( measure_or_unit_abbreviation , ** kwargs ) : unit_abbreviation , factor = _parse_unit ( measure_or_unit_abbreviation ) units = db . DBSession . query ( Unit ) . filter ( Unit . abbreviation == unit_abbreviation ) . all ( ) if len ( units ) == 0 : raise HydraError ( 'Unit %s not found.' % ( unit_abbreviation ) ) elif len ( units ) > 1 : raise HydraError ( 'Unit %s has multiple dimensions not found.' % ( unit_abbreviation ) ) else : dimension = db . DBSession . query ( Dimension ) . filter ( Dimension . id == units [ 0 ] . dimension_id ) . one ( ) return str ( dimension . name )
Return the physical dimension a given unit abbreviation of a measure or the measure itself refers to . The search key is the abbreviation or the full measure
46,143
def get_unit_by_abbreviation ( unit_abbreviation , ** kwargs ) : try : if unit_abbreviation is None : unit_abbreviation = '' unit_i = db . DBSession . query ( Unit ) . filter ( Unit . abbreviation == unit_abbreviation . strip ( ) ) . one ( ) return JSONObject ( unit_i ) except NoResultFound : raise ResourceNotFoundError ( "Unit '%s' not found" % ( unit_abbreviation ) )
Returns a single unit by abbreviation . Used as utility function to resolve string to id
46,144
def update_dimension ( dimension , ** kwargs ) : db_dimension = None dimension = JSONObject ( dimension ) try : db_dimension = db . DBSession . query ( Dimension ) . filter ( Dimension . id == dimension . id ) . filter ( ) . one ( ) if "description" in dimension and dimension [ "description" ] is not None : db_dimension . description = dimension [ "description" ] if "project_id" in dimension and dimension [ "project_id" ] is not None and dimension [ "project_id" ] != "" and dimension [ "project_id" ] . isdigit ( ) : db_dimension . project_id = dimension [ "project_id" ] except NoResultFound : raise ResourceNotFoundError ( "Dimension (ID=%s) does not exist" % ( dimension . id ) ) db . DBSession . flush ( ) return JSONObject ( db_dimension )
Update a dimension in the DB . Raises and exception if the dimension does not exist . The key is ALWAYS the name and the name itself is not modificable
46,145
def delete_dimension ( dimension_id , ** kwargs ) : try : dimension = db . DBSession . query ( Dimension ) . filter ( Dimension . id == dimension_id ) . one ( ) db . DBSession . query ( Unit ) . filter ( Unit . dimension_id == dimension . id ) . delete ( ) db . DBSession . delete ( dimension ) db . DBSession . flush ( ) return True except NoResultFound : raise ResourceNotFoundError ( "Dimension (dimension_id=%s) does not exist" % ( dimension_id ) )
Delete a dimension from the DB . Raises and exception if the dimension does not exist
46,146
def bulk_add_dimensions ( dimension_list , ** kwargs ) : added_dimensions = [ ] for dimension in dimension_list : added_dimensions . append ( add_dimension ( dimension , ** kwargs ) ) return JSONObject ( { "dimensions" : added_dimensions } )
Save all the dimensions contained in the passed list .
46,147
def bulk_add_units ( unit_list , ** kwargs ) : added_units = [ ] for unit in unit_list : added_units . append ( add_unit ( unit , ** kwargs ) ) return JSONObject ( { "units" : added_units } )
Save all the units contained in the passed list with the name of their dimension .
46,148
def delete_unit ( unit_id , ** kwargs ) : try : db_unit = db . DBSession . query ( Unit ) . filter ( Unit . id == unit_id ) . one ( ) db . DBSession . delete ( db_unit ) db . DBSession . flush ( ) return True except NoResultFound : raise ResourceNotFoundError ( "Unit (ID=%s) does not exist" % ( unit_id ) )
Delete a unit from the DB . Raises and exception if the unit does not exist
46,149
def update_unit ( unit , ** kwargs ) : try : db_unit = db . DBSession . query ( Unit ) . join ( Dimension ) . filter ( Unit . id == unit [ "id" ] ) . filter ( ) . one ( ) db_unit . name = unit [ "name" ] db_unit . abbreviation = unit . abbreviation db_unit . description = unit . description db_unit . lf = unit [ "lf" ] db_unit . cf = unit [ "cf" ] if "project_id" in unit and unit [ 'project_id' ] is not None and unit [ 'project_id' ] != "" : db_unit . project_id = unit [ "project_id" ] except NoResultFound : raise ResourceNotFoundError ( "Unit (ID=%s) does not exist" % ( unit [ "id" ] ) ) db . DBSession . flush ( ) return JSONObject ( db_unit )
Update a unit in the DB . Raises and exception if the unit does not exist
46,150
def encode ( encoding , data ) : data = ensure_bytes ( data , 'utf8' ) try : return ENCODINGS_LOOKUP [ encoding ] . code + ENCODINGS_LOOKUP [ encoding ] . converter . encode ( data ) except KeyError : raise ValueError ( 'Encoding {} not supported.' . format ( encoding ) )
Encodes the given data using the encoding that is specified
46,151
def get_codec ( data ) : try : key = ensure_bytes ( data [ : CODE_LENGTH ] , 'utf8' ) codec = ENCODINGS_LOOKUP [ key ] except KeyError : raise ValueError ( 'Can not determine encoding for {}' . format ( data ) ) else : return codec
Returns the codec used to encode the given data
46,152
def _get_role_arn ( ) : role_arn = bottle . request . headers . get ( 'X-Role-ARN' ) if not role_arn : role_arn = _lookup_ip_role_arn ( bottle . request . environ . get ( 'REMOTE_ADDR' ) ) if not role_arn : role_arn = _role_arn return role_arn
Return role arn from X - Role - ARN header lookup role arn from source IP or fall back to command line default .
46,153
def _on_dynamodb_exception ( self , error ) : if isinstance ( error , exceptions . ConditionalCheckFailedException ) : raise web . HTTPError ( 409 , reason = 'Condition Check Failure' ) elif isinstance ( error , exceptions . NoCredentialsError ) : if _no_creds_should_return_429 ( ) : raise web . HTTPError ( 429 , reason = 'Instance Credentials Failure' ) elif isinstance ( error , ( exceptions . ThroughputExceeded , exceptions . ThrottlingException ) ) : raise web . HTTPError ( 429 , reason = 'Too Many Requests' ) if hasattr ( self , 'logger' ) : self . logger . error ( 'DynamoDB Error: %s' , error ) raise web . HTTPError ( 500 , reason = str ( error ) )
Dynamically handle DynamoDB exceptions returning HTTP error responses .
46,154
def marshall ( values ) : serialized = { } for key in values : serialized [ key ] = _marshall_value ( values [ key ] ) return serialized
Marshall a dict into something DynamoDB likes .
46,155
def unmarshall ( values ) : unmarshalled = { } for key in values : unmarshalled [ key ] = _unmarshall_dict ( values [ key ] ) return unmarshalled
Transform a response payload from DynamoDB to a native dict
46,156
def _marshall_value ( value ) : if PYTHON3 and isinstance ( value , bytes ) : return { 'B' : base64 . b64encode ( value ) . decode ( 'ascii' ) } elif PYTHON3 and isinstance ( value , str ) : return { 'S' : value } elif not PYTHON3 and isinstance ( value , str ) : if is_binary ( value ) : return { 'B' : base64 . b64encode ( value ) . decode ( 'ascii' ) } return { 'S' : value } elif not PYTHON3 and isinstance ( value , unicode ) : return { 'S' : value . encode ( 'utf-8' ) } elif isinstance ( value , dict ) : return { 'M' : marshall ( value ) } elif isinstance ( value , bool ) : return { 'BOOL' : value } elif isinstance ( value , ( int , float ) ) : return { 'N' : str ( value ) } elif isinstance ( value , datetime . datetime ) : return { 'S' : value . isoformat ( ) } elif isinstance ( value , uuid . UUID ) : return { 'S' : str ( value ) } elif isinstance ( value , list ) : return { 'L' : [ _marshall_value ( v ) for v in value ] } elif isinstance ( value , set ) : if PYTHON3 and all ( [ isinstance ( v , bytes ) for v in value ] ) : return { 'BS' : _encode_binary_set ( value ) } elif PYTHON3 and all ( [ isinstance ( v , str ) for v in value ] ) : return { 'SS' : sorted ( list ( value ) ) } elif all ( [ isinstance ( v , ( int , float ) ) for v in value ] ) : return { 'NS' : sorted ( [ str ( v ) for v in value ] ) } elif not PYTHON3 and all ( [ isinstance ( v , str ) for v in value ] ) and all ( [ is_binary ( v ) for v in value ] ) : return { 'BS' : _encode_binary_set ( value ) } elif not PYTHON3 and all ( [ isinstance ( v , str ) for v in value ] ) and all ( [ is_binary ( v ) is False for v in value ] ) : return { 'SS' : sorted ( list ( value ) ) } else : raise ValueError ( 'Can not mix types in a set' ) elif value is None : return { 'NULL' : True } raise ValueError ( 'Unsupported type: %s' % type ( value ) )
Recursively transform value into an AttributeValue dict
46,157
def _unmarshall_dict ( value ) : key = list ( value . keys ( ) ) . pop ( ) if key == 'B' : return base64 . b64decode ( value [ key ] . encode ( 'ascii' ) ) elif key == 'BS' : return set ( [ base64 . b64decode ( v . encode ( 'ascii' ) ) for v in value [ key ] ] ) elif key == 'BOOL' : return value [ key ] elif key == 'L' : return [ _unmarshall_dict ( v ) for v in value [ key ] ] elif key == 'M' : return unmarshall ( value [ key ] ) elif key == 'NULL' : return None elif key == 'N' : return _to_number ( value [ key ] ) elif key == 'NS' : return set ( [ _to_number ( v ) for v in value [ key ] ] ) elif key == 'S' : return value [ key ] elif key == 'SS' : return set ( [ v for v in value [ key ] ] ) raise ValueError ( 'Unsupported value type: %s' % key )
Unmarshall a single dict value from a row that was returned from DynamoDB returning the value as a normal Python dict .
46,158
def _unwrap_result ( action , result ) : if not result : return elif action in { 'DeleteItem' , 'PutItem' , 'UpdateItem' } : return _unwrap_delete_put_update_item ( result ) elif action == 'GetItem' : return _unwrap_get_item ( result ) elif action == 'Query' or action == 'Scan' : return _unwrap_query_scan ( result ) elif action == 'CreateTable' : return _unwrap_create_table ( result ) elif action == 'DescribeTable' : return _unwrap_describe_table ( result ) return result
Unwrap a request response and return only the response data .
46,159
def list_tables ( self , exclusive_start_table_name = None , limit = None ) : payload = { } if exclusive_start_table_name : payload [ 'ExclusiveStartTableName' ] = exclusive_start_table_name if limit : payload [ 'Limit' ] = limit return self . execute ( 'ListTables' , payload )
Invoke the ListTables _ function .
46,160
def get_item ( self , table_name , key_dict , consistent_read = False , expression_attribute_names = None , projection_expression = None , return_consumed_capacity = None ) : payload = { 'TableName' : table_name , 'Key' : utils . marshall ( key_dict ) , 'ConsistentRead' : consistent_read } if expression_attribute_names : payload [ 'ExpressionAttributeNames' ] = expression_attribute_names if projection_expression : payload [ 'ProjectionExpression' ] = projection_expression if return_consumed_capacity : _validate_return_consumed_capacity ( return_consumed_capacity ) payload [ 'ReturnConsumedCapacity' ] = return_consumed_capacity return self . execute ( 'GetItem' , payload )
Invoke the GetItem _ function .
46,161
def update_item ( self , table_name , key_dict , condition_expression = None , update_expression = None , expression_attribute_names = None , expression_attribute_values = None , return_consumed_capacity = None , return_item_collection_metrics = None , return_values = None ) : payload = { 'TableName' : table_name , 'Key' : utils . marshall ( key_dict ) , 'UpdateExpression' : update_expression } if condition_expression : payload [ 'ConditionExpression' ] = condition_expression if expression_attribute_names : payload [ 'ExpressionAttributeNames' ] = expression_attribute_names if expression_attribute_values : payload [ 'ExpressionAttributeValues' ] = utils . marshall ( expression_attribute_values ) if return_consumed_capacity : _validate_return_consumed_capacity ( return_consumed_capacity ) payload [ 'ReturnConsumedCapacity' ] = return_consumed_capacity if return_item_collection_metrics : _validate_return_item_collection_metrics ( return_item_collection_metrics ) payload [ 'ReturnItemCollectionMetrics' ] = return_item_collection_metrics if return_values : _validate_return_values ( return_values ) payload [ 'ReturnValues' ] = return_values return self . execute ( 'UpdateItem' , payload )
Invoke the UpdateItem _ function .
46,162
def query ( self , table_name , index_name = None , consistent_read = None , key_condition_expression = None , filter_expression = None , expression_attribute_names = None , expression_attribute_values = None , projection_expression = None , select = None , exclusive_start_key = None , limit = None , scan_index_forward = True , return_consumed_capacity = None ) : payload = { 'TableName' : table_name , 'ScanIndexForward' : scan_index_forward } if index_name : payload [ 'IndexName' ] = index_name if consistent_read is not None : payload [ 'ConsistentRead' ] = consistent_read if key_condition_expression : payload [ 'KeyConditionExpression' ] = key_condition_expression if filter_expression : payload [ 'FilterExpression' ] = filter_expression if expression_attribute_names : payload [ 'ExpressionAttributeNames' ] = expression_attribute_names if expression_attribute_values : payload [ 'ExpressionAttributeValues' ] = utils . marshall ( expression_attribute_values ) if projection_expression : payload [ 'ProjectionExpression' ] = projection_expression if select : _validate_select ( select ) payload [ 'Select' ] = select if exclusive_start_key : payload [ 'ExclusiveStartKey' ] = utils . marshall ( exclusive_start_key ) if limit : payload [ 'Limit' ] = limit if return_consumed_capacity : _validate_return_consumed_capacity ( return_consumed_capacity ) payload [ 'ReturnConsumedCapacity' ] = return_consumed_capacity return self . execute ( 'Query' , payload )
A Query _ operation uses the primary key of a table or a secondary index to directly access items from that table or index .
46,163
def scan ( self , table_name , index_name = None , consistent_read = None , projection_expression = None , filter_expression = None , expression_attribute_names = None , expression_attribute_values = None , segment = None , total_segments = None , select = None , limit = None , exclusive_start_key = None , return_consumed_capacity = None ) : payload = { 'TableName' : table_name } if index_name : payload [ 'IndexName' ] = index_name if consistent_read is not None : payload [ 'ConsistentRead' ] = consistent_read if filter_expression : payload [ 'FilterExpression' ] = filter_expression if expression_attribute_names : payload [ 'ExpressionAttributeNames' ] = expression_attribute_names if expression_attribute_values : payload [ 'ExpressionAttributeValues' ] = utils . marshall ( expression_attribute_values ) if projection_expression : payload [ 'ProjectionExpression' ] = projection_expression if segment : payload [ 'Segment' ] = segment if total_segments : payload [ 'TotalSegments' ] = total_segments if select : _validate_select ( select ) payload [ 'Select' ] = select if exclusive_start_key : payload [ 'ExclusiveStartKey' ] = utils . marshall ( exclusive_start_key ) if limit : payload [ 'Limit' ] = limit if return_consumed_capacity : _validate_return_consumed_capacity ( return_consumed_capacity ) payload [ 'ReturnConsumedCapacity' ] = return_consumed_capacity return self . execute ( 'Scan' , payload )
The Scan _ operation returns one or more items and item attributes by accessing every item in a table or a secondary index .
46,164
def execute ( self , action , parameters ) : measurements = collections . deque ( [ ] , self . _max_retries ) for attempt in range ( 1 , self . _max_retries + 1 ) : try : result = yield self . _execute ( action , parameters , attempt , measurements ) except ( exceptions . InternalServerError , exceptions . RequestException , exceptions . ThrottlingException , exceptions . ThroughputExceeded , exceptions . ServiceUnavailable ) as error : if attempt == self . _max_retries : if self . _instrumentation_callback : self . _instrumentation_callback ( measurements ) self . _on_exception ( error ) duration = self . _sleep_duration ( attempt ) self . logger . warning ( '%r on attempt %i, sleeping %.2f seconds' , error , attempt , duration ) yield gen . sleep ( duration ) except exceptions . DynamoDBException as error : if self . _instrumentation_callback : self . _instrumentation_callback ( measurements ) self . _on_exception ( error ) else : if self . _instrumentation_callback : self . _instrumentation_callback ( measurements ) self . logger . debug ( '%s result: %r' , action , result ) raise gen . Return ( _unwrap_result ( action , result ) )
Execute a DynamoDB action with the given parameters . The method will retry requests that failed due to OS level errors or when being throttled by DynamoDB .
46,165
def set_error_callback ( self , callback ) : self . logger . debug ( 'Setting error callback: %r' , callback ) self . _on_error = callback
Assign a method to invoke when a request has encountered an unrecoverable error in an action execution .
46,166
def set_instrumentation_callback ( self , callback ) : self . logger . debug ( 'Setting instrumentation callback: %r' , callback ) self . _instrumentation_callback = callback
Assign a method to invoke when a request has completed gathering measurements .
46,167
def _execute ( self , action , parameters , attempt , measurements ) : future = concurrent . Future ( ) start = time . time ( ) def handle_response ( request ) : self . _on_response ( action , parameters . get ( 'TableName' , 'Unknown' ) , attempt , start , request , future , measurements ) ioloop . IOLoop . current ( ) . add_future ( self . _client . fetch ( 'POST' , '/' , body = json . dumps ( parameters ) . encode ( 'utf-8' ) , headers = { 'x-amz-target' : 'DynamoDB_20120810.{}' . format ( action ) , 'Content-Type' : 'application/x-amz-json-1.0' , } ) , handle_response ) return future
Invoke a DynamoDB action
46,168
def _on_response ( self , action , table , attempt , start , response , future , measurements ) : self . logger . debug ( '%s on %s request #%i = %r' , action , table , attempt , response ) now , exception = time . time ( ) , None try : future . set_result ( self . _process_response ( response ) ) except aws_exceptions . ConfigNotFound as error : exception = exceptions . ConfigNotFound ( str ( error ) ) except aws_exceptions . ConfigParserError as error : exception = exceptions . ConfigParserError ( str ( error ) ) except aws_exceptions . NoCredentialsError as error : exception = exceptions . NoCredentialsError ( str ( error ) ) except aws_exceptions . NoProfileError as error : exception = exceptions . NoProfileError ( str ( error ) ) except aws_exceptions . AWSError as error : exception = exceptions . DynamoDBException ( error ) except ( ConnectionError , ConnectionResetError , OSError , aws_exceptions . RequestException , ssl . SSLError , _select . error , ssl . socket_error , socket . gaierror ) as error : exception = exceptions . RequestException ( str ( error ) ) except TimeoutError : exception = exceptions . TimeoutException ( ) except httpclient . HTTPError as error : if error . code == 599 : exception = exceptions . TimeoutException ( ) else : exception = exceptions . RequestException ( getattr ( getattr ( error , 'response' , error ) , 'body' , str ( error . code ) ) ) except Exception as error : exception = error if exception : future . set_exception ( exception ) measurements . append ( Measurement ( now , action , table , attempt , max ( now , start ) - start , exception . __class__ . __name__ if exception else exception ) )
Invoked when the HTTP request to the DynamoDB has returned and is responsible for setting the future result or exception based upon the HTTP response provided .
46,169
def _process_response ( response ) : error = response . exception ( ) if error : if isinstance ( error , aws_exceptions . AWSError ) : if error . args [ 1 ] [ 'type' ] in exceptions . MAP : raise exceptions . MAP [ error . args [ 1 ] [ 'type' ] ] ( error . args [ 1 ] [ 'message' ] ) raise error http_response = response . result ( ) if not http_response or not http_response . body : raise exceptions . DynamoDBException ( 'empty response' ) return json . loads ( http_response . body . decode ( 'utf-8' ) )
Process the raw AWS response returning either the mapped exception or deserialized response .
46,170
def write ( self , obj , resource_id = None ) : if resource_id is not None : if self . read ( resource_id ) : raise ValueError ( "There are one object already with this id." ) obj [ '_id' ] = resource_id prepared_creation_tx = self . driver . instance . transactions . prepare ( operation = 'CREATE' , signers = self . user . public_key , asset = { 'namespace' : self . namespace , 'data' : obj } , metadata = { 'namespace' : self . namespace , 'data' : obj } ) signed_tx = self . driver . instance . transactions . fulfill ( prepared_creation_tx , private_keys = self . user . private_key ) self . logger . debug ( 'bdb::write::{}' . format ( signed_tx [ 'id' ] ) ) self . driver . instance . transactions . send_commit ( signed_tx ) return signed_tx
Write and obj in bdb .
46,171
def _get ( self , tx_id ) : value = [ { 'data' : transaction [ 'metadata' ] , 'id' : transaction [ 'id' ] } for transaction in self . driver . instance . transactions . get ( asset_id = self . get_asset_id ( tx_id ) ) ] [ - 1 ] if value [ 'data' ] [ 'data' ] : self . logger . debug ( 'bdb::read::{}' . format ( value [ 'data' ] ) ) return value else : return False
Read and obj in bdb using the tx_id .
46,172
def _update ( self , metadata , tx_id , resource_id ) : try : if not tx_id : sent_tx = self . write ( metadata , resource_id ) self . logger . debug ( 'bdb::put::{}' . format ( sent_tx [ 'id' ] ) ) return sent_tx else : txs = self . driver . instance . transactions . get ( asset_id = self . get_asset_id ( tx_id ) ) unspent = txs [ - 1 ] sent_tx = self . _put ( metadata , unspent , resource_id ) self . logger . debug ( 'bdb::put::{}' . format ( sent_tx ) ) return sent_tx except BadRequest as e : logging . error ( e )
Update and obj in bdb using the tx_id .
46,173
def query ( self , search_model : QueryModel ) : self . logger . debug ( 'bdb::get::{}' . format ( search_model . query ) ) assets = json . loads ( requests . post ( "http://localhost:4000/query" , data = search_model . query ) . content ) [ 'data' ] self . logger . debug ( 'bdb::result::len {}' . format ( len ( assets ) ) ) assets_metadata = [ ] for i in assets : try : assets_metadata . append ( self . _get ( i [ 'id' ] ) [ 'data' ] [ 'data' ] ) except : pass return assets_metadata
Query to bdb namespace .
46,174
def get_asset_id ( self , tx_id ) : tx = self . driver . instance . transactions . retrieve ( txid = tx_id ) assert tx is not None return tx [ 'id' ] if tx [ 'operation' ] == 'CREATE' else tx [ 'asset' ] [ 'id' ]
Return the tx_id of the first transaction .
46,175
def hdr ( data , filename ) : hdrobj = data if isinstance ( data , HDRobject ) else HDRobject ( data ) hdrobj . write ( filename )
write ENVI header files
46,176
def write ( self , filename = 'same' ) : if filename == 'same' : filename = self . filename if not filename . endswith ( '.hdr' ) : filename += '.hdr' with open ( filename , 'w' ) as out : out . write ( self . __str__ ( ) )
write object to an ENVI header file
46,177
def GoZero ( self , speed ) : ' Go to Zero position ' self . ReleaseSW ( ) spi . SPI_write_byte ( self . CS , 0x82 | ( self . Dir & 1 ) ) spi . SPI_write_byte ( self . CS , 0x00 ) spi . SPI_write_byte ( self . CS , speed ) while self . IsBusy ( ) : pass time . sleep ( 0.3 ) self . ReleaseSW ( )
Go to Zero position
46,178
def ReadStatusBit ( self , bit ) : ' Report given status bit ' spi . SPI_write_byte ( self . CS , 0x39 ) spi . SPI_write_byte ( self . CS , 0x00 ) data0 = spi . SPI_read_byte ( ) spi . SPI_write_byte ( self . CS , 0x00 ) data1 = spi . SPI_read_byte ( ) if bit > 7 : OutputBit = ( data0 >> ( bit - 8 ) ) & 1 else : OutputBit = ( data1 >> bit ) & 1 return OutputBit
Report given status bit
46,179
def SPI_write_byte ( self , chip_select , data ) : 'Writes a data to a SPI device selected by chipselect bit. ' self . bus . write_byte_data ( self . address , chip_select , data )
Writes a data to a SPI device selected by chipselect bit .
46,180
def SPI_write ( self , chip_select , data ) : 'Writes data to SPI device selected by chipselect bit. ' dat = list ( data ) dat . insert ( 0 , chip_select ) return self . bus . write_i2c_block ( self . address , dat )
Writes data to SPI device selected by chipselect bit .
46,181
def SPI_config ( self , config ) : 'Configure SPI interface parameters.' self . bus . write_byte_data ( self . address , 0xF0 , config ) return self . bus . read_byte_data ( self . address , 0xF0 )
Configure SPI interface parameters .
46,182
def GPIO_read ( self ) : 'Reads logic state on GPIO enabled slave-selects pins.' self . bus . write_byte_data ( self . address , 0xF5 , 0x0f ) status = self . bus . read_byte ( self . address ) bits_values = dict ( [ ( 'SS0' , status & 0x01 == 0x01 ) , ( 'SS1' , status & 0x02 == 0x02 ) , ( 'SS2' , status & 0x04 == 0x04 ) , ( 'SS3' , status & 0x08 == 0x08 ) ] ) return bits_values
Reads logic state on GPIO enabled slave - selects pins .
46,183
def GPIO_config ( self , gpio_enable , gpio_config ) : 'Enable or disable slave-select pins as gpio.' self . bus . write_byte_data ( self . address , 0xF6 , gpio_enable ) self . bus . write_byte_data ( self . address , 0xF7 , gpio_config ) return
Enable or disable slave - select pins as gpio .
46,184
def get_address ( self ) : LOGGER . debug ( "Reading RPS01A sensor's address." , ) return self . bus . read_byte_data ( self . address , self . address_reg )
Returns sensors I2C address .
46,185
def get_zero_position ( self ) : LSB = self . bus . read_byte_data ( self . address , self . zero_position_MSB ) MSB = self . bus . read_byte_data ( self . address , self . zero_position_LSB ) DATA = ( MSB << 6 ) + LSB return DATA
Returns programmed zero position in OTP memory .
46,186
def get_agc_value ( self ) : LOGGER . debug ( "Reading RPS01A sensor's AGC settings" , ) return self . bus . read_byte_data ( self . address , self . AGC_reg )
Returns sensor s Automatic Gain Control actual value . 0 - Represents high magtetic field 0xFF - Represents low magnetic field
46,187
def get_angle ( self , verify = False ) : LSB = self . bus . read_byte_data ( self . address , self . angle_LSB ) MSB = self . bus . read_byte_data ( self . address , self . angle_MSB ) DATA = ( MSB << 6 ) + LSB if not verify : return ( 360.0 / 2 ** 14 ) * DATA else : status = self . get_diagnostics ( ) if not ( status [ 'Comp_Low' ] ) and not ( status [ 'Comp_High' ] ) and not ( status [ 'COF' ] ) : return ( 360.0 / 2 ** 14 ) * DATA else : return None
Retuns measured angle in degrees in range 0 - 360 .
46,188
def main ( input_bed , output_file , output_features = False , genome = None , only_canonical = False , short = False , extended = False , high_confidence = False , ambiguities_method = False , coding_only = False , collapse_exons = False , work_dir = False , is_debug = False ) : logger . init ( is_debug_ = is_debug ) if not genome : raise click . BadParameter ( 'Error: please, specify genome build name with -g (e.g. `-g hg19`)' , param = 'genome' ) if short : if extended : raise click . BadParameter ( '--short and --extended can\'t be set both' , param = 'extended' ) if output_features : raise click . BadParameter ( '--short and --output-features can\'t be set both' , param = 'output_features' ) elif output_features or extended : extended = True short = False if not verify_file ( input_bed ) : click . BadParameter ( f'Usage: {__file__} Input_BED_file -g hg19 -o Annotated_BED_file [options]' , param = 'input_bed' ) input_bed = verify_file ( input_bed , is_critical = True , description = f'Input BED file for {__file__}' ) if work_dir : work_dir = join ( adjust_path ( work_dir ) , os . path . splitext ( basename ( input_bed ) ) [ 0 ] ) safe_mkdir ( work_dir ) info ( f'Created work directory {work_dir}' ) else : work_dir = mkdtemp ( 'bed_annotate' ) debug ( 'Created temporary work directory {work_dir}' ) input_bed = clean_bed ( input_bed , work_dir ) input_bed = verify_bed ( input_bed , is_critical = True , description = f'Input BED file for {__file__} after cleaning' ) output_file = adjust_path ( output_file ) output_file = annotate ( input_bed , output_file , work_dir , genome = genome , only_canonical = only_canonical , short = short , extended = extended , high_confidence = high_confidence , collapse_exons = collapse_exons , output_features = output_features , ambiguities_method = ambiguities_method , coding_only = coding_only , is_debug = is_debug ) if not work_dir : debug ( f'Removing work directory {work_dir}' ) shutil . 
rmtree ( work_dir ) info ( f'Done, saved to {output_file}' )
Annotating BED file based on reference features annotations .
46,189
def StateOfCharge ( self ) : return ( self . bus . read_byte_data ( self . address , 0x02 ) + self . bus . read_byte_data ( self . address , 0x03 ) * 256 )
% of Full Charge
46,190
def Chemistry ( self ) : length = self . bus . read_byte_data ( self . address , 0x79 ) chem = [ ] for n in range ( length ) : chem . append ( self . bus . read_byte_data ( self . address , 0x7A + n ) ) return chem
Get cells chemistry
46,191
def single_read ( self , register ) : comm_reg = ( 0b00010 << 3 ) + register if register == self . AD7730_STATUS_REG : bytes_num = 1 elif register == self . AD7730_DATA_REG : bytes_num = 3 elif register == self . AD7730_MODE_REG : bytes_num = 2 elif register == self . AD7730_FILTER_REG : bytes_num = 3 elif register == self . AD7730_DAC_REG : bytes_num = 1 elif register == self . AD7730_OFFSET_REG : bytes_num = 3 elif register == self . AD7730_GAIN_REG : bytes_num = 3 elif register == self . AD7730_TEST_REG : bytes_num = 3 command = [ comm_reg ] + ( [ 0x00 ] * bytes_num ) spi . SPI_write ( self . CS , command ) data = spi . SPI_read ( bytes_num + 1 ) return data [ 1 : ]
Reads data from desired register only once .
46,192
def getStatus ( self ) : status = self . single_read ( self . AD7730_STATUS_REG ) bits_values = dict ( [ ( 'NOREF' , status [ 0 ] & 0x10 == 0x10 ) , ( 'STBY' , status [ 0 ] & 0x20 == 0x20 ) , ( 'STDY' , status [ 0 ] & 0x40 == 0x40 ) , ( 'RDY' , status [ 0 ] & 0x80 == 0x80 ) ] ) return bits_values
RDY - Ready Bit . This bit provides the status of the RDY flag from the part . The status and function of this bit is the same as the RDY output pin . A number of events set the RDY bit high as indicated in Table XVIII in datasheet
46,193
def _calculate_checksum ( value ) : polynomial = 0x131 crc = 0xFF for byteCtr in [ ord ( x ) for x in struct . pack ( ">H" , value ) ] : crc ^= byteCtr for bit in range ( 8 , 0 , - 1 ) : if crc & 0x80 : crc = ( crc << 1 ) ^ polynomial else : crc = ( crc << 1 ) return crc
4 . 12 Checksum Calculation from an unsigned short input
46,194
def run(self):
    """Run when the frontend button is pressed.

    Monte-Carlo estimate of pi: draws random points in the unit square and
    counts those inside the quarter unit circle.  Every 1000 draws the
    progress is logged and the current estimate pushed to the frontend.
    Generator: each ``emit``/``set_state`` call is yielded (presumably
    coroutine-style futures — TODO confirm against the framework).
    """
    inside = 0
    # NOTE(review): range(1, samples) performs samples-1 draws — looks like
    # an off-by-one, but draws is also the divisor below, so starting at 1
    # avoids division by zero; confirm intent before changing.
    for draws in range(1, self.data['samples']):
        # Random point in the unit square; count hits inside the circle.
        r1, r2 = (random(), random())
        if r1 ** 2 + r2 ** 2 < 1.0:
            inside += 1

        # Only report every 1000 draws.
        if draws % 1000 != 0:
            continue

        yield self.emit('log', {'draws': draws, 'inside': inside})

        # Hit fraction p estimates pi/4; uncertainty from the binomial
        # standard deviation of the hit count.
        p = inside / draws
        pi = {
            'estimate': 4.0 * inside / draws,
            'uncertainty': 4.0 * math.sqrt(draws * p * (1.0 - p)) / draws,
        }
        yield self.set_state(pi=pi)

    yield self.emit('log', {'action': 'done'})
Run when button is pressed .
46,195
def init_datastores(self):
    """Create this analysis instance's datastores.

    Sets up a per-instance store (keyed by ``self.id_``) and a store shared
    by all instances of the class (keyed by the class name); change
    notifications from each are forwarded to the frontend via ``emit``.
    """
    # Per-instance datastore.
    self.data = Datastore(self.id_)
    self.data.subscribe(lambda payload: self.emit('data', payload))
    # Datastore shared across all instances of this analysis class.
    self.class_data = Datastore(type(self).__name__)
    self.class_data.subscribe(lambda payload: self.emit('class_data', payload))
Initialize datastores for this analysis instance .
46,196
def emit ( self , signal , message = '__nomessagetoken__' ) : if signal == 'log' : self . log_backend . info ( message ) elif signal == 'warn' : self . log_backend . warn ( message ) elif signal == 'error' : self . log_backend . error ( message ) return self . emit_to_frontend ( signal , message )
Emit a signal to the frontend .
46,197
def set(self, i, value):
    """Set the value at position *i* and return a Future.

    The value is encoded first; if the encoded form is identical to what is
    already stored, nothing happens and ``self`` is returned directly.
    Otherwise the new value is stored and the change listeners are fired
    via ``trigger_changed``, whose result is returned.
    """
    encoded = encode(value, self.get_change_trigger(i))
    already_stored = i in self.data and self.data[i] == encoded
    if already_stored:
        return self
    self.data[i] = encoded
    return self.trigger_changed(i)
Set value at position i and return a Future .
46,198
def set(self, key, value):
    """Set *value* at *key* in this domain's shared store; returns a Future."""
    domain_store = DatastoreLegacy.store[self.domain]
    return domain_store.set(key, value)
Set value at key and return a Future
46,199
def watch():
    """Regenerate the Sphinx HTML documentation whenever the sources change.

    Performs one initial build, then watches the current directory and
    re-runs sphinx-build on every ``*.rst`` / ``*.py`` change.  Blocks
    until interrupted.
    """
    # Initial build (plumbum command, run in the foreground).
    # NOTE(review): the initial build passes -E (full re-read) while the
    # watch command below does not — confirm that asymmetry is intended.
    sphinx_build['-b', 'html', '-E', 'docs', 'docs/_build/html'] & FG
    # watchdog trick: shell out to sphinx-build on matching file events;
    # drop_during_process avoids queueing rebuilds while one is running.
    handler = ShellCommandTrick(
        shell_command='sphinx-build -b html docs docs/_build/html',
        patterns=['*.rst', '*.py'],
        ignore_patterns=['_build/*'],
        ignore_directories=['.tox'],
        drop_during_process=True)
    observer = Observer()
    # Runs the observer loop until interrupted.
    observe_with(observer, handler, pathnames=['.'], recursive=True)
Regenerate documentation when it changes.