idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
46,000
def add_resourcegroupitems(scenario_id, items, scenario=None, **kwargs):
    """Add a list of resource group items to a scenario.

    Looks up the scenario if one is not supplied, verifies the caller owns
    the scenario's network, then persists each item.

    Returns the list of newly created group item records.
    """
    user_id = int(kwargs.get('user_id'))

    if scenario is None:
        scenario = _get_scenario(scenario_id, user_id)

    _check_network_ownership(scenario.network_id, user_id)

    created = [_add_resourcegroupitem(item, scenario.id) for item in items]

    db.DBSession.flush()

    return created
Add a list of items to a group in a scenario.
46,001
def get_plugins(**kwargs):
    """Get all available plugins.

    Scans the configured plugin directory for ``plugin.xml`` files, validates
    each against the plugin XSD and returns the valid ones serialised with
    ``etree.tostring``. Invalid plugin files are logged and skipped.
    """
    plugins = []
    plugin_paths = []

    base_plugin_dir = config.get('plugin', 'default_directory')
    plugin_xsd_path = config.get('plugin', 'plugin_xsd_path')

    # Collect candidate plugin directories, skipping hidden dirs and 'xml'.
    for directory in os.listdir(base_plugin_dir):
        if directory[0] == '.' or directory == 'xml':
            continue
        path = os.path.join(base_plugin_dir, directory)
        if os.path.isdir(path):
            plugin_paths.append(path)

    xmlschema_doc = etree.parse(plugin_xsd_path)
    xmlschema = etree.XMLSchema(xmlschema_doc)

    for plugin_dir in plugin_paths:
        full_plugin_path = os.path.join(plugin_dir, 'trunk')
        for file_name in os.listdir(full_plugin_path):
            file_path = os.path.join(full_plugin_path, file_name)
            if file_name == 'plugin.xml':
                # BUG FIX: the original opened this file twice and never
                # closed either handle; a context manager releases it.
                try:
                    with open(file_path, 'r') as plugin_file:
                        xml_tree = etree.parse(plugin_file)
                    xmlschema.assertValid(xml_tree)
                    plugins.append(etree.tostring(xml_tree))
                except Exception as e:
                    log.critical("Schema %s did not validate! (error was %s)"
                                 % (file_name, e))
                break
        else:
            # No plugin.xml found in this directory's 'trunk'.
            log.warning("No xml plugin details found for %s. Ignoring", plugin_dir)

    return plugins
Get all available plugins
46,002
def run_plugin(plugin, **kwargs):
    """Run a plugin as a subprocess and return its PID as a string.

    The plugin's location is resolved under the user's home directory and its
    declared parameters are passed as ``--name value`` argument pairs.
    """
    args = [sys.executable]

    home = os.path.expanduser('~')
    path_to_plugin = os.path.join(home, 'svn/HYDRA/HydraPlugins', plugin.location)
    args.append(path_to_plugin)

    plugin_params = " "
    for p in plugin.params:
        args.append("--%s" % p.name)
        args.append(p.value)
        plugin_params = plugin_params + "--%s=%s " % (p.name, p.value)

    log_dir = config.get('plugin', 'result_file')
    log_file = os.path.join(home, log_dir, plugin.name)

    # BUG FIX: the original opened the log file up to three times with a bare
    # 'except' and never closed any handle. All it needs is for the file to
    # exist before the plugin starts writing to it.
    if not os.path.exists(log_file):
        open(log_file, 'w').close()

    pid = subprocess.Popen(args).pid
    log.info("Process started! PID: %s", pid)
    return str(pid)
Run a plugin
46,003
def create_mysql_db(db_url):
    """To simplify deployment, create the MySQL DB if it's not there.

    Accepts a URL with or without a DB name stated and returns a db url
    containing the db name (plus utf8 charset options) for use in the main
    sqlalchemy engine. Non-mysql URLs are returned stripped but otherwise
    unchanged.
    """
    db_url = db_url.strip().strip('/')
    # Only act on mysql-style URLs.
    if db_url.find('mysql') >= 0:
        db_name = config.get('mysqld', 'db_name', 'hydradb')
        if db_url.find(db_name) >= 0:
            # URL already contains the DB name; strip it to get the server URL.
            no_db_url = db_url.rsplit("/", 1)[0]
        else:
            if db_url.find('@') == -1:
                raise HydraError("No Hostname specified in DB url")
            host_and_db_name = db_url.split('@')[1]
            if host_and_db_name.find('/') >= 0:
                # A (different) DB name is present after the host.
                no_db_url, db_name = db_url.rsplit("/", 1)
            else:
                no_db_url = db_url
        db_url = no_db_url + "/" + db_name
        db_url = "{}?charset=utf8&use_unicode=1".format(db_url)
        if config.get('mysqld', 'auto_create', 'Y') == 'Y':
            # Connect without a DB name so the DB itself can be created.
            tmp_engine = create_engine(no_db_url)
            log.warning("Creating database {0} as it does not exist.".format(db_name))
            tmp_engine.execute("CREATE DATABASE IF NOT EXISTS {0}".format(db_name))
    return db_url
To simplify deployment, create the MySQL DB if it's not there. Accepts a URL with or without a DB name stated, and returns a db url containing the db name for use in the main sqlalchemy engine.
46,004
def add_project(project, **kwargs):
    """Add a new project; returns the new Project instance.

    Raises HydraError if the user already has a project with this name.
    """
    user_id = kwargs.get('user_id')

    # Reject duplicate project names for this user.
    existing_proj = get_project_by_name(project.name, user_id=user_id)
    if len(existing_proj) > 0:
        raise HydraError("A Project with the name \"%s\" already exists" % (project.name,))

    proj_i = Project()
    proj_i.name = project.name
    proj_i.description = project.description
    proj_i.created_by = user_id

    attr_map = hdb.add_resource_attributes(proj_i, project.attributes)
    db.DBSession.flush()

    proj_i.attribute_data = _add_project_attribute_data(
        proj_i, attr_map, project.attribute_data)
    proj_i.set_owner(user_id)

    db.DBSession.add(proj_i)
    db.DBSession.flush()

    return proj_i
Add a new project returns a project complexmodel
46,005
def update_project(project, **kwargs):
    """Update a project's name, description and attribute data.

    Requires write permission; returns the updated Project instance.
    """
    user_id = kwargs.get('user_id')

    proj_i = _get_project(project.id)
    proj_i.check_write_permission(user_id)

    proj_i.name = project.name
    proj_i.description = project.description

    attr_map = hdb.add_resource_attributes(proj_i, project.attributes)
    proj_i.attribute_data = _add_project_attribute_data(
        proj_i, attr_map, project.attribute_data)

    db.DBSession.flush()

    return proj_i
Update a project returns a project complexmodel
46,006
def get_project_by_network_id(network_id, **kwargs):
    """Get the project containing the given network.

    Returns None if no readable matching project is found.
    """
    user_id = kwargs.get('user_id')
    projects_i = db.DBSession.query(Project).join(ProjectOwner).join(
        Network, Project.id == Network.project_id).filter(
            Network.id == network_id,
            ProjectOwner.user_id == user_id).order_by('name').all()

    ret_project = None
    for project_i in projects_i:
        try:
            project_i.check_read_permission(user_id)
            ret_project = project_i
        except Exception:
            # BUG FIX: was a bare 'except:', which also swallowed
            # SystemExit / KeyboardInterrupt.
            log.info("Can't return project %s. User %s does not have "
                     "permission to read it.", project_i.id, user_id)

    return ret_project
get a project complexmodel by a network_id
46,007
def get_projects(uid, include_shared_projects=True, projects_ids_list_filter=None, **kwargs):
    """Get all the projects owned by the specified user.

    Includes projects created by the user and, when include_shared_projects
    is True, projects shared with them. For shared projects, only networks
    accessible to the user are attached (admins see all active networks).

    Returns a list of JSONObject projects, each with a 'networks' list.
    """
    req_user_id = kwargs.get('user_id')

    log.info("Getting projects for %s", uid)

    projects_qry = db.DBSession.query(Project)
    if include_shared_projects is True:
        projects_qry = projects_qry.join(ProjectOwner).filter(
            Project.status == 'A',
            or_(ProjectOwner.user_id == uid, Project.created_by == uid))
    else:
        projects_qry = projects_qry.join(ProjectOwner).filter(
            Project.created_by == uid)

    if projects_ids_list_filter is not None:
        if isinstance(projects_ids_list_filter, str):
            # SECURITY FIX: the original used eval() on this string;
            # literal_eval parses only Python literals.
            import ast
            projects_ids_list_filter = ast.literal_eval(projects_ids_list_filter)

        if type(projects_ids_list_filter) is int:
            projects_qry = projects_qry.filter(Project.id == projects_ids_list_filter)
        else:
            projects_qry = projects_qry.filter(
                Project.id.in_(projects_ids_list_filter))

    projects_qry = projects_qry.options(noload('networks')).order_by('id')
    projects_i = projects_qry.all()

    log.info("Project query done for user %s. %s projects found",
             uid, len(projects_i))

    user = db.DBSession.query(User).filter(User.id == req_user_id).one()
    isadmin = user.is_admin()

    projects_j = []
    for project_i in projects_i:
        project_i.check_read_permission(req_user_id)
        project_i.owners  # touch to load the owners relationship

        network_qry = db.DBSession.query(Network).filter(
            Network.project_id == project_i.id,
            Network.status == 'A')
        if not isadmin:
            # BUG FIX: the original discarded the result of this call,
            # so the ownership restriction was never applied to the query.
            network_qry = network_qry.outerjoin(NetworkOwner).filter(
                or_(and_(NetworkOwner.user_id != None,
                         NetworkOwner.view == 'Y'),
                    Network.created_by == uid))
        networks_i = network_qry.all()

        networks_j = []
        for network_i in networks_i:
            network_i.owners  # touch to load the owners relationship
            net_j = JSONObject(network_i)
            if net_j.layout is not None:
                net_j.layout = JSONObject(net_j.layout)
            else:
                net_j.layout = JSONObject({})
            networks_j.append(net_j)

        project_j = JSONObject(project_i)
        project_j.networks = networks_j
        projects_j.append(project_j)

    log.info("Networks loaded projects for user %s", uid)

    return projects_j
Get all the projects owned by the specified user . These include projects created by the user but also ones shared with the user . For shared projects only include networks in those projects which are accessible to the user .
46,008
def get_networks(project_id, include_data='N', **kwargs):
    """Get all active networks in a project.

    Networks the user is not permitted to read are silently skipped.
    Returns a list of network objects.
    """
    log.info("Getting networks for project %s", project_id)
    user_id = kwargs.get('user_id')

    project = _get_project(project_id)
    project.check_read_permission(user_id)

    rows = db.DBSession.query(Network.id, Network.status).filter(
        Network.project_id == project_id).all()

    networks = []
    for row in rows:
        if row.status != 'A':
            continue
        try:
            net = network.get_network(row.id, summary=True,
                                      include_data=include_data, **kwargs)
            log.info("Network %s retrieved", net.name)
            networks.append(net)
        except PermissionError:
            log.info("Not returning network %s as user %s does not have "
                     "permission to read it." % (row.id, user_id))

    return networks
Get all networks in a project Returns an array of network objects .
46,009
def get_network_project(network_id, **kwargs):
    """Get the project that a network is in.

    Raises HydraError if no such network/project is found.
    """
    # BUG FIX: the join previously matched Project.id == Network.id, which
    # only returned a row when a network happened to share its project's PK
    # value; the correct join key is Network.project_id.
    net_proj = db.DBSession.query(Project).join(
        Network, and_(Project.id == Network.project_id,
                      Network.id == network_id)).first()

    if net_proj is None:
        raise HydraError("Network %s not found" % network_id)

    return net_proj
get the project that a network is in
46,010
def add_resource_types(resource_i, types):
    """Save a reference to the types used for this resource.

    Types already linked to the resource are skipped. Returns the list of
    newly linked type IDs.
    """
    if types is None:
        return []

    existing_type_ids = [t.type_id for t in resource_i.types] if resource_i.types else []

    new_type_ids = []
    for templatetype in types:
        if templatetype.id in existing_type_ids:
            continue

        rt_i = ResourceType()
        rt_i.type_id = templatetype.id
        rt_i.ref_key = resource_i.ref_key
        # Point the link at the correct FK column for this resource kind.
        if resource_i.ref_key == 'NODE':
            rt_i.node_id = resource_i.id
        elif resource_i.ref_key == 'LINK':
            rt_i.link_id = resource_i.id
        elif resource_i.ref_key == 'GROUP':
            rt_i.group_id = resource_i.id

        resource_i.types.append(rt_i)
        new_type_ids.append(templatetype.id)

    return new_type_ids
Save a reference to the types used for this resource .
46,011
def create_default_units_and_dimensions():
    """Add the default units and dimensions, read from a JSON file.

    Only dimensions and units not already in the DB are added, so new ones
    can be introduced just by modifying the JSON file.
    """
    default_units_file_location = os.path.realpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)),
                     '../', 'static', 'default_units_and_dimensions.json'))

    # FIX: rely on the context manager; the original also called close()
    # explicitly inside the with-block.
    with open(default_units_file_location) as json_data:
        d = json.load(json_data)

    for json_dimension in d["dimension"]:
        dimension_name = get_utf8_encoded_string(json_dimension["name"])

        db_dimensions_by_name = db.DBSession.query(Dimension).filter(
            Dimension.name == dimension_name).all()
        if len(db_dimensions_by_name) == 0:
            log.debug("Adding Dimension `{}`".format(dimension_name))
            new_dimension = Dimension()
            if "id" in json_dimension:
                new_dimension.id = json_dimension["id"]
            new_dimension.name = dimension_name
            db.DBSession.add(new_dimension)
            db.DBSession.flush()

        # Re-read so we have the persisted row (with its id) in either case.
        new_dimension = get_dimension_from_db_by_name(dimension_name)

        for json_unit in json_dimension["unit"]:
            db_units_by_name = db.DBSession.query(Unit).filter(
                Unit.abbreviation == get_utf8_encoded_string(json_unit['abbr'])).all()
            if len(db_units_by_name) == 0:
                log.debug("Adding Unit %s in %s",
                          json_unit['abbr'], json_dimension["name"])
                new_unit = Unit()
                if "id" in json_unit:
                    new_unit.id = json_unit["id"]
                new_unit.dimension_id = new_dimension.id
                new_unit.name = get_utf8_encoded_string(json_unit['name'])
                new_unit.abbreviation = get_utf8_encoded_string(json_unit['abbr'])
                # NOTE(review): 'lf'/'cf' are presumably linear/constant
                # conversion factors — confirm against the Unit model.
                new_unit.lf = get_utf8_encoded_string(json_unit['lf'])
                new_unit.cf = get_utf8_encoded_string(json_unit['cf'])
                if "description" in json_unit:
                    new_unit.description = get_utf8_encoded_string(
                        json_unit["description"])
                db.DBSession.add(new_unit)
                db.DBSession.flush()

    try:
        db.DBSession.commit()
    except Exception as e:
        # FIX: the original silently swallowed commit failures; keep the
        # best-effort behaviour but at least log what went wrong.
        log.warning("Could not commit default units/dimensions: %s", e)
    return
Adds the units and the dimensions by reading a json file. It adds only dimensions and units that are not already inside the db, so it is possible to add new dimensions and units to the DB just by modifying the json file.
46,012
def get_dimension_from_db_by_name(dimension_name):
    """Fetch a single Dimension row by name, returned as a JSONObject.

    Raises ResourceNotFoundError when no such dimension exists.
    """
    try:
        dimension_i = db.DBSession.query(Dimension).filter(
            Dimension.name == dimension_name).one()
    except NoResultFound:
        raise ResourceNotFoundError("Dimension %s not found" % (dimension_name))

    return JSONObject(dimension_i)
Gets a dimension from the DB table .
46,013
def get_rules(scenario_id, **kwargs):
    """Get all the active rules for a given scenario."""
    rule_qry = db.DBSession.query(Rule).filter(
        Rule.scenario_id == scenario_id,
        Rule.status == 'A')
    return rule_qry.all()
Get all the rules for a given scenario .
46,014
def get_attribute_by_name_and_dimension(name, dimension_id=None, **kwargs):
    """Get a specific attribute by name and (optional) dimension.

    dimension_id may be None because a dimension is no longer mandatory on an
    attribute. Returns None if no matching attribute exists.
    """
    attr_qry = db.DBSession.query(Attr).filter(
        and_(Attr.name == name, Attr.dimension_id == dimension_id))
    try:
        attr_i = attr_qry.one()
    except NoResultFound:
        return None

    log.debug("Attribute retrieved")
    return attr_i
Get a specific attribute by its name. dimension_id can be None because the dimension_id is no longer mandatory on an attribute.
46,015
def add_attributes(attrs, **kwargs):
    """Add a list of generic attributes, for use in resource attributes and
    template types.

    Attributes are matched against existing ones on (lower-cased name,
    dimension); matches are returned as-is rather than duplicated. Returns
    JSONObjects for the new and matched attributes.
    """
    # Index every existing attribute by (lowercase name, dimension).
    attr_dict = {
        (a.name.lower(), a.dimension_id): JSONObject(a)
        for a in db.DBSession.query(Attr).all()
    }

    attrs_to_add = []
    existing_attrs = []
    for candidate in attrs:
        if candidate is None:
            continue
        log.debug("Adding attribute: %s", candidate)
        match = attr_dict.get((candidate.name.lower(), candidate.dimension_id))
        if match is None:
            attrs_to_add.append(JSONObject(candidate))
        else:
            existing_attrs.append(match)

    new_attrs = []
    for attr in attrs_to_add:
        attr_i = Attr()
        attr_i.name = attr.name
        attr_i.dimension_id = attr.dimension_id
        attr_i.description = attr.description
        db.DBSession.add(attr_i)
        new_attrs.append(attr_i)

    db.DBSession.flush()

    return [JSONObject(a) for a in new_attrs + existing_attrs]
Add a list of generic attributes which can then be used in creating a resource attribute and put into a type .
46,016
def get_attributes(**kwargs):
    """Get all attributes, ordered by name."""
    return db.DBSession.query(Attr).order_by(Attr.name).all()
Get all attributes
46,017
def add_resource_attribute(resource_type, resource_id, attr_id, is_var,
                           error_on_duplicate=True, **kwargs):
    """Add an attribute to a resource.

    Raises ResourceNotFoundError when the attribute does not exist and
    HydraError on an unrecognised resource type. If the resource already has
    the attribute, either raises HydraError or (when error_on_duplicate is
    False) returns the existing resource attribute.
    """
    attr = db.DBSession.query(Attr).filter(Attr.id == attr_id).first()
    if attr is None:
        raise ResourceNotFoundError(
            "Attribute with ID %s does not exist." % attr_id)

    resource_i = _get_resource(resource_type, resource_id)

    # Map each resource type onto the FK column identifying the resource.
    id_columns = {
        'NETWORK': ResourceAttr.network_id,
        'NODE': ResourceAttr.node_id,
        'LINK': ResourceAttr.link_id,
        'GROUP': ResourceAttr.group_id,
        'PROJECT': ResourceAttr.project_id,
    }
    if resource_type not in id_columns:
        raise HydraError('Resource type "{}" not recognised.'.format(resource_type))

    resourceattr_qry = db.DBSession.query(ResourceAttr).filter(
        ResourceAttr.ref_key == resource_type,
        id_columns[resource_type] == resource_id)

    for ra in resourceattr_qry.all():
        if ra.attr_id == attr_id:
            if not error_on_duplicate:
                return ra
            raise HydraError("Duplicate attribute. %s %s already has attribute %s"
                             % (resource_type, resource_i.get_name(), attr.name))

    attr_is_var = 'Y' if is_var == 'Y' else 'N'
    new_ra = resource_i.add_attribute(attr_id, attr_is_var)
    db.DBSession.flush()

    return new_ra
Add a resource attribute to a resource.
46,018
def add_resource_attrs_from_type(type_id, resource_type, resource_id, **kwargs):
    """Add all the attributes defined by a template type to a resource.

    Attributes the resource already has are skipped. Returns the newly added
    resource attributes.
    """
    type_i = _get_templatetype(type_id)
    resource_i = _get_resource(resource_type, resource_id)

    resourceattr_qry = db.DBSession.query(ResourceAttr).filter(
        ResourceAttr.ref_key == resource_type)

    # BUG FIX: the original called .filter() without assigning the result in
    # every branch, so the resource-id restriction was silently dropped and
    # attributes of *every* resource of this type counted as existing.
    if resource_type == 'NETWORK':
        resourceattr_qry = resourceattr_qry.filter(
            ResourceAttr.network_id == resource_id)
    elif resource_type == 'NODE':
        resourceattr_qry = resourceattr_qry.filter(
            ResourceAttr.node_id == resource_id)
    elif resource_type == 'LINK':
        resourceattr_qry = resourceattr_qry.filter(
            ResourceAttr.link_id == resource_id)
    elif resource_type == 'GROUP':
        resourceattr_qry = resourceattr_qry.filter(
            ResourceAttr.group_id == resource_id)
    elif resource_type == 'PROJECT':
        resourceattr_qry = resourceattr_qry.filter(
            ResourceAttr.project_id == resource_id)

    existing = {ra.attr_id: ra for ra in resourceattr_qry.all()}

    new_resource_attrs = []
    for item in type_i.typeattrs:
        if existing.get(item.attr_id) is None:
            new_resource_attrs.append(resource_i.add_attribute(item.attr_id))

    db.DBSession.flush()

    return new_resource_attrs
adds all the attributes defined by a type to a node .
46,019
def get_all_resource_attributes(ref_key, network_id, template_id=None, **kwargs):
    """Get all the resource attributes for a given resource type in a network.

    For example, if ref_key is 'NODE' this returns the attributes of every
    node in the network, letting a front end pre-load a network's resource
    attribute information in one call. If template_id is given, only return
    resource attributes whose attributes belong to that template.
    """
    user_id = kwargs.get('user_id')  # NOTE(review): unused — no permission check here

    # Outer-join against all three resource tables and keep only the rows
    # whose owning node/link/group is in the requested network.
    resource_attr_qry = db.DBSession.query(ResourceAttr).outerjoin(
        Node, Node.id == ResourceAttr.node_id).outerjoin(
        Link, Link.id == ResourceAttr.link_id).outerjoin(
        ResourceGroup, ResourceGroup.id == ResourceAttr.group_id).filter(
            ResourceAttr.ref_key == ref_key,
            or_(and_(ResourceAttr.node_id != None,
                     ResourceAttr.node_id == Node.id,
                     Node.network_id == network_id),
                and_(ResourceAttr.link_id != None,
                     ResourceAttr.link_id == Link.id,
                     Link.network_id == network_id),
                and_(ResourceAttr.group_id != None,
                     ResourceAttr.group_id == ResourceGroup.id,
                     ResourceGroup.network_id == network_id)))

    if template_id is not None:
        # Restrict to attributes appearing in the template's types.
        attr_ids = []
        rs = db.DBSession.query(TypeAttr).join(
            TemplateType, TemplateType.id == TypeAttr.type_id).filter(
                TemplateType.template_id == template_id).all()
        for r in rs:
            attr_ids.append(r.attr_id)
        resource_attr_qry = resource_attr_qry.filter(
            ResourceAttr.attr_id.in_(attr_ids))

    resource_attrs = resource_attr_qry.all()

    return resource_attrs
Get all the resource attributes for a given resource type in the network. For example, if the ref_key is NODE then it will return all the attributes of all nodes in the network. This function allows a front end to pre-load an entire network's resource attribute information to cut down on function calls. If template_id is specified, only return the resource attributes within that template.
46,020
def get_resource_attributes(ref_key, ref_id, type_id=None, **kwargs):
    """Get all the resource attributes for a given resource.

    If type_id is specified, only return the resource attributes whose
    attributes belong to that template type.
    """
    user_id = kwargs.get('user_id')  # currently unused

    qry = db.DBSession.query(ResourceAttr).filter(
        ResourceAttr.ref_key == ref_key,
        or_(ResourceAttr.network_id == ref_id,
            ResourceAttr.node_id == ref_id,
            ResourceAttr.link_id == ref_id,
            ResourceAttr.group_id == ref_id))

    if type_id is not None:
        typeattrs = db.DBSession.query(TypeAttr).filter(
            TypeAttr.type_id == type_id).all()
        qry = qry.filter(
            ResourceAttr.attr_id.in_([ta.attr_id for ta in typeattrs]))

    return qry.all()
Get all the resource attributes for a given resource . If type_id is specified only return the resource attributes within the type .
46,021
def check_attr_dimension(attr_id, **kwargs):
    """Check that the dimension of all data linked to an attribute is
    consistent with the attribute's definition.

    If the attribute has dimension 'volume', every dataset connected with it
    via a resource attribute must also have a dimension of 'volume'. Raises
    HydraError listing the offending dataset ids; returns 'OK' otherwise.
    """
    attr_i = _get_attr(attr_id)

    # All datasets reachable from this attribute through resource scenarios.
    datasets = db.DBSession.query(Dataset).filter(
        Dataset.id == ResourceScenario.dataset_id,
        ResourceScenario.resource_attr_id == ResourceAttr.id,
        ResourceAttr.attr_id == attr_id).all()

    bad_datasets = []
    for d in datasets:
        # Python precedence groups this as:
        #   (no dimension AND a unit) OR (a dimension AND no unit)
        #   OR (the unit's dimension differs from the attribute's).
        # NOTE(review): when both dimension_id and unit are None, the final
        # clause still calls get_dimension_by_unit_id(None) — confirm that
        # is intended / safe.
        if attr_i.dimension_id is None and d.unit is not None or \
                attr_i.dimension_id is not None and d.unit is None or \
                units.get_dimension_by_unit_id(d.unit_id) != attr_i.dimension_id:
            bad_datasets.append(d.id)

    if len(bad_datasets) > 0:
        raise HydraError("Datasets %s have a different dimension_id to attribute %s"
                         % (bad_datasets, attr_id))

    return 'OK'
Check that the dimension of the resource attribute data is consistent with the definition of the attribute . If the attribute says volume make sure every dataset connected with this attribute via a resource attribute also has a dimension of volume .
46,022
def get_resource_attribute(resource_attr_id, **kwargs):
    """Get a specific resource attribute by ID.

    Raises ResourceNotFoundError if it does not exist.
    """
    resource_attr = db.DBSession.query(ResourceAttr).filter(
        ResourceAttr.id == resource_attr_id).first()

    if resource_attr is None:
        # BUG FIX: the message was never %-formatted — the id was being
        # passed as a stray second constructor argument.
        raise ResourceNotFoundError(
            "Resource attribute %s does not exist" % resource_attr_id)

    return resource_attr
Get a specific resource attribute by ID.
46,023
def delete_mappings_in_network(network_id, network_2_id=None, **kwargs):
    """Delete all the resource attribute mappings touching a network.

    When a second network is specified, only delete the mappings between the
    two networks. Returns 'OK'.
    """
    qry = db.DBSession.query(ResourceAttrMap).filter(
        or_(ResourceAttrMap.network_a_id == network_id,
            ResourceAttrMap.network_b_id == network_id))

    if network_2_id is not None:
        qry = qry.filter(or_(ResourceAttrMap.network_a_id == network_2_id,
                             ResourceAttrMap.network_b_id == network_2_id))

    for mapping in qry.all():
        db.DBSession.delete(mapping)
    db.DBSession.flush()

    return 'OK'
Delete all the resource attribute mappings in a network . If another network is specified only delete the mappings between the two networks .
46,024
def get_network_mappings(network_id, network_2_id=None, **kwargs):
    """Get the mappings of a network's resource attributes to/from other
    networks — NOT all the mappings within a network (for that, use
    get_mappings_in_network).

    If a second network is specified, only return the mappings between the
    two networks.
    """
    # Match mappings where either end belongs to the requested network.
    qry = db.DBSession.query(ResourceAttrMap).filter(
        or_(and_(ResourceAttrMap.resource_attr_id_a == ResourceAttr.id,
                 ResourceAttr.network_id == network_id),
            and_(ResourceAttrMap.resource_attr_id_b == ResourceAttr.id,
                 ResourceAttr.network_id == network_id)))

    if network_2_id is not None:
        # An aliased ResourceAttr is needed so this restriction can apply to
        # the *other* end of the mapping independently of the first filter.
        aliased_ra = aliased(ResourceAttr, name="ra2")
        qry = qry.filter(
            or_(and_(ResourceAttrMap.resource_attr_id_a == aliased_ra.id,
                     aliased_ra.network_id == network_2_id),
                and_(ResourceAttrMap.resource_attr_id_b == aliased_ra.id,
                     aliased_ra.network_id == network_2_id)))

    return qry.all()
Get all the mappings of network resource attributes NOT ALL THE MAPPINGS WITHIN A NETWORK . For that use get_mappings_in_network . If another network is specified only return the mappings between the two networks .
46,025
def check_attribute_mapping_exists(resource_attr_id_source,
                                   resource_attr_id_target, **kwargs):
    """Check whether a mapping exists between a source and target resource
    attribute.

    Returns 'Y' if a mapping exists, 'N' in all other cases.
    """
    matches = db.DBSession.query(ResourceAttrMap).filter(
        ResourceAttrMap.resource_attr_id_a == resource_attr_id_source,
        ResourceAttrMap.resource_attr_id_b == resource_attr_id_target).all()

    return 'Y' if len(matches) > 0 else 'N'
Check whether an attribute mapping exists between a source and target resource attribute . returns Y if a mapping exists . Returns N in all other cases .
46,026
def get_attribute_group(group_id, **kwargs):
    """Get a specific attribute group, checking the caller can read its
    project.

    Raises HydraError when the group does not exist.
    """
    user_id = kwargs.get('user_id')

    try:
        group_i = db.DBSession.query(AttrGroup).filter(
            AttrGroup.id == group_id).one()
        group_i.project.check_read_permission(user_id)
    except NoResultFound:
        raise HydraError("Group %s not found" % (group_id,))

    return group_i
Get a specific attribute group
46,027
def delete_attribute_group(group_id, **kwargs):
    """Delete an attribute group.

    Requires write permission on the group's project. Returns 'OK'.
    """
    user_id = kwargs['user_id']

    try:
        group_i = db.DBSession.query(AttrGroup).filter(
            AttrGroup.id == group_id).one()
        group_i.project.check_write_permission(user_id)
        db.DBSession.delete(group_i)
        db.DBSession.flush()
        log.info("Group %s in project %s deleted", group_i.id, group_i.project_id)
    except NoResultFound:
        # BUG FIX: the message was never %-formatted — HydraError received
        # the id as a stray second argument.
        raise HydraError('No Attribute Group %s was found' % group_id)

    return 'OK'
Delete an attribute group .
46,028
def get_network_attributegroup_items(network_id, **kwargs):
    """Get all the attribute-group items in a network (read permission
    required)."""
    user_id = kwargs.get('user_id')

    net_i = _get_network(network_id)
    net_i.check_read_permission(user_id)

    return db.DBSession.query(AttrGroupItem).filter(
        AttrGroupItem.network_id == network_id).all()
Get all the group items in a network
46,029
def get_group_attributegroup_items(network_id, group_id, **kwargs):
    """Get all the items in a specified attribute group within a network
    (read permission required)."""
    user_id = kwargs.get('user_id')

    network_i = _get_network(network_id)
    network_i.check_read_permission(user_id)

    return db.DBSession.query(AttrGroupItem).filter(
        AttrGroupItem.network_id == network_id,
        AttrGroupItem.group_id == group_id).all()
Get all the items in a specified group within a network
46,030
def get_attribute_item_groups(network_id, attr_id, **kwargs):
    """Get all the attribute-group items in a network that reference a given
    attribute (read permission required)."""
    user_id = kwargs.get('user_id')

    network_i = _get_network(network_id)
    network_i.check_read_permission(user_id)

    return db.DBSession.query(AttrGroupItem).filter(
        AttrGroupItem.network_id == network_id,
        AttrGroupItem.attr_id == attr_id).all()
Get all the group items in a network with a given attribute_id
46,031
def share_network(network_id, usernames, read_only, share, **kwargs):
    """Share a network with a list of users, identified by their usernames.

    read_only == 'Y' grants read access only (and disables re-sharing);
    otherwise write access is granted and 'share' controls whether the
    recipients may re-share the network.
    """
    user_id = kwargs.get('user_id')

    net_i = _get_network(network_id)
    net_i.check_share_permission(user_id)

    if read_only == 'Y':
        write = 'N'
        share = 'N'
    else:
        write = 'Y'

    # Only the network's creator may grant the ability to re-share it.
    if net_i.created_by != int(user_id) and share == 'Y':
        raise HydraError("Cannot share the 'sharing' ability as user %s is not"
                         " the owner of network %s" % (user_id, network_id))

    for username in usernames:
        user_i = _get_user(username)
        net_i.set_owner(user_i.id, write=write, share=share)

        # Make sure the recipient can at least see the containing project.
        if not any(o.user_id == user_i.id for o in net_i.project.owners):
            net_i.project.set_owner(user_i.id, write='N', share='N')

    db.DBSession.flush()
Share a network with a list of users identified by their usernames .
46,032
def unshare_network(network_id, usernames, **kwargs):
    """Un-share a network with a list of users identified by their
    usernames."""
    user_id = kwargs.get('user_id')

    net_i = _get_network(network_id)
    net_i.check_share_permission(user_id)

    for username in usernames:
        user_i = _get_user(username)
        # BUG FIX: the original passed write=write, share=share, but neither
        # name is defined in this function, so every call raised NameError.
        net_i.unset_owner(user_i.id)

    db.DBSession.flush()
Un - Share a network with a list of users identified by their usernames .
46,033
def share_project(project_id, usernames, read_only, share, **kwargs):
    """Share an entire project (and its networks) with a list of users,
    identified by their usernames.

    read_only == 'Y' grants read access only; otherwise write access is
    granted and 'share' controls whether the recipients may re-share.
    """
    user_id = int(kwargs.get('user_id'))

    proj_i = _get_project(project_id)
    proj_i.check_share_permission(user_id)

    # The caller must be an owner of the project.
    if not any(owner.user_id == user_id for owner in proj_i.owners):
        raise HydraError("Permission Denied. Cannot share project.")

    if read_only == 'Y':
        write = 'N'
        share = 'N'
    else:
        write = 'Y'

    # Only the project's creator may grant the ability to re-share it.
    if proj_i.created_by != user_id and share == 'Y':
        raise HydraError("Cannot share the 'sharing' ability as user %s is not"
                         " the owner of project %s" % (user_id, project_id))

    for username in usernames:
        user_i = _get_user(username)
        proj_i.set_owner(user_i.id, write=write, share=share)
        for net_i in proj_i.networks:
            net_i.set_owner(user_i.id, write=write, share=share)

    db.DBSession.flush()
Share an entire project with a list of users, identified by their usernames.
46,034
def unshare_project(project_id, usernames, **kwargs):
    """Un-share a project with a list of users identified by their
    usernames."""
    user_id = kwargs.get('user_id')

    proj_i = _get_project(project_id)
    proj_i.check_share_permission(user_id)

    for username in usernames:
        user_i = _get_user(username)
        # BUG FIX: the original passed write=write, share=share, but neither
        # name is defined in this function, so every call raised NameError.
        proj_i.unset_owner(user_i.id)

    db.DBSession.flush()
Un - share a project with a list of users identified by their usernames .
46,035
def set_project_permission(project_id, usernames, read, write, share, **kwargs):
    """Set permissions on a project (and all its networks) for a list of
    users identified by their usernames.

    Setting read to 'N' forces write and share to 'N' as well. Raises
    HydraError when attempting to change the creator's permissions.
    """
    user_id = kwargs.get('user_id')

    proj_i = _get_project(project_id)
    proj_i.check_share_permission(user_id)

    # A user with no read access cannot have write or share access either.
    if read == 'N':
        write = 'N'
        share = 'N'

    for username in usernames:
        user_i = _get_user(username)

        # The creator's permissions are immutable.
        if proj_i.created_by == user_i.id:
            raise HydraError("Cannot set permissions on project %s"
                             " for user %s as this user is the creator." %
                             (project_id, username))

        # NOTE(review): 'share' is applied to the networks below but not to
        # the project itself — confirm whether that is intentional.
        proj_i.set_owner(user_i.id, read=read, write=write)

        for net_i in proj_i.networks:
            net_i.set_owner(user_i.id, read=read, write=write, share=share)

    db.DBSession.flush()
Set permissions on a project for a list of users, identified by their usernames.
46,036
def get_all_project_owners(project_ids=None, **kwargs):
    """Get the project owner entries for the requested projects.

    If project_ids is None, return the owner entries for ALL projects.
    Returns a list of JSONObjects.
    """
    qry = db.DBSession.query(ProjectOwner)

    if project_ids is not None:
        qry = qry.filter(ProjectOwner.project_id.in_(project_ids))

    return [JSONObject(owner) for owner in qry.all()]
Get the project owner entries for all the requested projects . If the project_ids argument is None return all the owner entries for ALL projects
46,037
def get_all_network_owners(network_ids=None, **kwargs):
    """Get the network owner entries for the requested networks.

    If network_ids is None, return the owner entries for ALL networks.
    Returns a list of JSONObjects.
    """
    qry = db.DBSession.query(NetworkOwner)

    if network_ids is not None:
        qry = qry.filter(NetworkOwner.network_id.in_(network_ids))

    return [JSONObject(owner) for owner in qry.all()]
Get the network owner entries for all the requested networks . If the network_ids argument is None return all the owner entries for ALL networks
46,038
def add_dataset(data_type, val, unit_id=None, metadata=None, name="",
                user_id=None, flush=False):
    """Add a single dataset outside of any scenario.

    Data can exist without scenarios; this is the mechanism whereby single
    pieces of data can be added without going through a scenario. If a
    dataset with an identical hash already exists and is visible to the user,
    that dataset is reused; otherwise the new dataset is made unique via a
    'created_at' metadata entry and added.
    """
    # BUG FIX: the default was a shared mutable dict ({}); use None instead.
    if metadata is None:
        metadata = {}

    d = Dataset()
    d.type = data_type
    d.value = val
    d.set_metadata(metadata)
    d.unit_id = unit_id
    d.name = name
    d.created_by = user_id
    d.hash = d.set_hash()

    try:
        existing_dataset = db.DBSession.query(Dataset).filter(
            Dataset.hash == d.hash).one()
        if existing_dataset.check_user(user_id):
            d = existing_dataset
        else:
            # Same hash but not visible to this user: perturb the metadata
            # so the new dataset gets its own hash.
            d.set_metadata({'created_at': datetime.datetime.now()})
            d.set_hash()
            db.DBSession.add(d)
    except NoResultFound:
        db.DBSession.add(d)

    if flush:
        db.DBSession.flush()

    return d
Data can exist without scenarios . This is the mechanism whereby single pieces of data can be added without doing it through a scenario .
46,039
def _bulk_insert_data(bulk_data, user_id=None, source=None):
    """Insert many datasets at once, minimising DB round-trips.

    `user_id` is the user adding the data; `source` names the app adding it.
    Datasets are de-duplicated by content hash. Returns the list of dataset
    records (one per incoming item, in the same order), reusing existing
    rows where the hash matches and the user may read them.
    """
    get_timing = lambda x: datetime.datetime.now() - x
    start_time = datetime.datetime.now()
    # Normalise the incoming data into {hash: dataset_dict}.
    new_data = _process_incoming_data(bulk_data, user_id, source)
    log.info("Incoming data processed in %s", (get_timing(start_time)))
    # Fetch any datasets already in the DB with the same hashes.
    existing_data = _get_existing_data(new_data.keys())
    log.info("Existing data retrieved.")
    # hash -> dataset record (existing or to-be-inserted) for the final mapping.
    hash_id_map = {}
    new_datasets = []
    metadata = {}
    for d in bulk_data:
        dataset_dict = new_data[d.hash]
        current_hash = d.hash
        if existing_data.get(current_hash) is not None:
            dataset = existing_data.get(current_hash)
            if dataset.check_user(user_id) == False:
                # Identical data exists but is hidden from this user:
                # create a distinct copy (new hash) instead of reusing it.
                new_dataset = _make_new_dataset(dataset_dict)
                new_datasets.append(new_dataset)
                metadata[new_dataset['hash']] = dataset_dict['metadata']
            else:
                hash_id_map[current_hash] = dataset
        elif current_hash in hash_id_map:
            # Duplicate within this batch -- already mapped; queue for insert
            # (the duplicate is filtered out again below).
            new_datasets.append(dataset_dict)
        else:
            new_datasets.append(dataset_dict)
            hash_id_map[current_hash] = dataset_dict
            metadata[current_hash] = dataset_dict['metadata']
    log.debug("Isolating new data %s", get_timing(start_time))
    # Remove intra-batch duplicates before the bulk insert.
    new_data_for_insert = []
    new_data_hashes = []
    for d in new_datasets:
        if d['hash'] not in new_data_hashes:
            new_data_for_insert.append(d)
            new_data_hashes.append(d['hash'])
    if len(new_data_for_insert) > 0:
        log.debug("Inserting new data %s", get_timing(start_time))
        db.DBSession.bulk_insert_mappings(Dataset, new_data_for_insert)
        log.debug("New data Inserted %s", get_timing(start_time))
        # Re-query the freshly inserted rows to obtain their DB identities.
        new_data = _get_existing_data(new_data_hashes)
        log.debug("New data retrieved %s", get_timing(start_time))
        for k, v in new_data.items():
            hash_id_map[k] = v
        _insert_metadata(metadata, hash_id_map)
        log.debug("Metadata inserted %s", get_timing(start_time))
    # Map every incoming item back to its (existing or new) dataset record.
    returned_ids = []
    for d in bulk_data:
        returned_ids.append(hash_id_map[d.hash])
    log.info("Done bulk inserting data. %s datasets", len(returned_ids))
    return returned_ids
Insert lots of datasets at once to reduce the number of DB interactions . user_id indicates the user adding the data source indicates the name of the app adding the data both user_id and source are added as metadata
46,040
def _get_metadata(dataset_ids):
    """Fetch all Metadata rows for the given dataset ids.

    Large id lists are fetched in chunks of `qry_in_threshold` because
    sqlite limits the size of an IN clause.
    """
    if len(dataset_ids) == 0:
        return []
    collected = []
    if len(dataset_ids) > qry_in_threshold:
        for offset in range(0, len(dataset_ids), qry_in_threshold):
            chunk = dataset_ids[offset:offset + qry_in_threshold]
            log.info("Querying %s metadatas", len(chunk))
            rows = db.DBSession.query(Metadata).filter(
                Metadata.dataset_id.in_(chunk)).all()
            collected.extend(rows)
    else:
        for row in db.DBSession.query(Metadata).filter(
                Metadata.dataset_id.in_(dataset_ids)):
            collected.append(row)
    return collected
Get all the metadata for a given list of datasets
46,041
def _get_datasets(dataset_ids):
    """Return {dataset_id: Dataset} for the requested ids.

    Queried in chunks of `qry_in_threshold` as sqlite can only handle
    IN clauses with fewer than 1000 elements.
    """
    result = {}
    if len(dataset_ids) > qry_in_threshold:
        fetched = []
        for offset in range(0, len(dataset_ids), qry_in_threshold):
            chunk = dataset_ids[offset:offset + qry_in_threshold]
            log.info("Querying %s datasets", len(chunk))
            fetched.extend(db.DBSession.query(Dataset).filter(
                Dataset.id.in_(chunk)).all())
    else:
        fetched = db.DBSession.query(Dataset).filter(
            Dataset.id.in_(dataset_ids))
    for ds in fetched:
        result[ds.id] = ds
    log.info("Retrieved %s datasets", len(result))
    return result
Get all the datasets in a list of dataset IDS . This must be done in chunks of 999 as sqlite can only handle in with < 1000 elements .
46,042
def get_vals_between_times(dataset_id, start_time, end_time, timestep, increment, **kwargs):
    """Retrieve timeseries values between two times, filling in timestamps
    not explicitly present in the series.

    start/end may be datetime strings (stepped by `timestep`/`increment`
    via timedelta) or numeric strings (stepped by `increment` directly).
    Returns a JSONObject whose 'data' member is a JSON-encoded list.
    """
    try:
        # Datetime-based series: build the list of sample times.
        server_start_time = get_datetime(start_time)
        server_end_time = get_datetime(end_time)
        times = [server_start_time]
        next_time = server_start_time
        while next_time < server_end_time:
            # A zero increment would loop forever.
            if int(increment) == 0:
                raise HydraError("%s is not a valid increment for this search." % increment)
            # `timestep` is a timedelta keyword, e.g. 'days', 'hours'.
            next_time = next_time + datetime.timedelta(**{timestep: int(increment)})
            times.append(next_time)
    except ValueError:
        # Fall back to a numeric (non-datetime) index.
        try:
            server_start_time = Decimal(start_time)
            server_end_time = Decimal(end_time)
            times = [float(server_start_time)]
            next_time = server_start_time
            while next_time < server_end_time:
                next_time = float(next_time) + increment
                times.append(next_time)
        except:
            raise HydraError("Unable to get times. Please check to and from times.")
    td = db.DBSession.query(Dataset).filter(Dataset.id == dataset_id).one()
    log.debug("Number of times to fetch: %s", len(times))
    data = td.get_val(timestamp=times)
    data_to_return = []
    if type(data) is list:
        # Drop missing values; coerce each row to a plain list.
        for d in data:
            if d is not None:
                data_to_return.append(list(d))
    elif data is None:
        data_to_return = []
    else:
        data_to_return.append(data)
    dataset = JSONObject({'data': json.dumps(data_to_return)})
    return dataset
Retrieve data between two specified times within a timeseries. The times need not be specified in the timeseries; this function will fill in the blanks.
46,043
def delete_dataset(dataset_id, **kwargs):
    """Permanently remove a dataset from the DB.

    Raises HydraError if the dataset does not exist or is still referenced
    by any resource scenario. CAUTION: this cannot easily be undone.
    """
    try:
        dataset_i = db.DBSession.query(Dataset).filter(
            Dataset.id == dataset_id).one()
    except NoResultFound:
        raise HydraError("Dataset %s does not exist." % dataset_id)
    usages = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.dataset_id == dataset_id).all()
    if len(usages) > 0:
        raise HydraError("Cannot delete %s. Dataset is used by one or more resource scenarios." % dataset_id)
    db.DBSession.delete(dataset_i)
    db.DBSession.flush()
    db.DBSession.expunge_all()
Removes a piece of data from the DB . CAUTION! Use with care as this cannot be undone easily .
46,044
def add_note(note, **kwargs):
    """Persist a new note attached to the resource named by its ref_key/ref_id."""
    new_note = Note()
    new_note.ref_key = note.ref_key
    new_note.set_ref(note.ref_key, note.ref_id)
    new_note.value = note.value
    new_note.created_by = kwargs.get('user_id')
    db.DBSession.add(new_note)
    db.DBSession.flush()
    return new_note
Add a new note
46,045
def update_note(note, **kwargs):
    """Update an existing note's reference and value.

    The note's ref_key may not change -- a note cannot be moved between
    resource types.
    """
    existing = _get_note(note.id)
    if note.ref_key != existing.ref_key:
        raise HydraError("Cannot convert a %s note to a %s note. Please create a new note instead." % (existing.ref_key, note.ref_key))
    existing.set_ref(note.ref_key, note.ref_id)
    existing.value = note.value
    db.DBSession.flush()
    return existing
Update a note
46,046
def purge_note(note_id, **kwargs):
    """Permanently remove a note from the DB."""
    doomed = _get_note(note_id)
    db.DBSession.delete(doomed)
    db.DBSession.flush()
Remove a note from the DB permanently.
46,047
def login(username, password, **kwargs):
    """Log a user in, returning a (user_id, session_id) tuple.

    Credentials are checked by util.hdb.login_user (raises on failure);
    a file-backed session is created to hold the user's identity.
    """
    user_id = util.hdb.login_user(username, password)
    # File-backed session store; config values fall back to defaults.
    hydra_session = session.Session({},
                                    validate_key=config.get('COOKIES', 'VALIDATE_KEY', 'YxaDbzUUSo08b+'),
                                    type='file',
                                    cookie_expires=True,
                                    data_dir=config.get('COOKIES', 'DATA_DIR', '/tmp'),
                                    file_dir=config.get('COOKIES', 'FILE_DIR', '/tmp/auth'))
    hydra_session['user_id'] = user_id
    hydra_session['username'] = username
    hydra_session.save()
    return (user_id, hydra_session.id)
Login a user returning a dict containing their user_id and session_id
46,048
def logout(session_id, **kwargs):
    """Log a user out by deleting their session (if any); returns 'OK'."""
    store = session.SessionObject({},
                                  validate_key=config.get('COOKIES', 'VALIDATE_KEY', 'YxaDbzUUSo08b+'),
                                  type='file',
                                  cookie_expires=True,
                                  data_dir=config.get('COOKIES', 'DATA_DIR', '/tmp'),
                                  file_dir=config.get('COOKIES', 'FILE_DIR', '/tmp/auth'))
    stored_session = store.get_by_id(session_id)
    if stored_session is not None:
        stored_session.delete()
        stored_session.save()
    return 'OK'
Logout a user removing their cookie if it exists and returning OK
46,049
def get_session_user(session_id, **kwargs):
    """Return the user_id associated with a session id, or None if no
    such session exists."""
    store = session.SessionObject({},
                                  validate_key=config.get('COOKIES', 'VALIDATE_KEY', 'YxaDbzUUSo08b+'),
                                  type='file',
                                  cookie_expires=True,
                                  data_dir=config.get('COOKIES', 'DATA_DIR', '/tmp'),
                                  file_dir=config.get('COOKIES', 'FILE_DIR', '/tmp/auth'))
    stored_session = store.get_by_id(session_id)
    if stored_session is None:
        return None
    return stored_session['user_id']
Given a session ID get the user ID that it is associated with
46,050
def array_dim(arr):
    """Return the dimensions of a nested list as a list of lengths,
    outermost first (e.g. [[1,2,3],[4,5,6]] -> [2, 3]).

    A scalar (anything without len()) yields [].
    """
    dims = []
    probe = arr
    while True:
        try:
            dims.append(len(probe))
            probe = probe[0]
        except TypeError:
            # Reached a scalar (or an unindexable container): stop descending.
            return dims
Return the dimensions of a multidimensional array.
46,051
def arr_to_vector(arr):
    """Flatten a multidimensional (nested) list into a single flat list."""
    levels = array_dim(arr)
    flattened = arr
    # Collapse one level of nesting per pass.
    for _ in range(len(levels) - 1):
        merged = []
        for sub in flattened:
            merged.extend(sub)
        flattened = merged
    return flattened
Reshape a multidimensional array to a vector .
46,052
def vector_to_arr(vec, dim):
    """Reshape a flat vector into a nested list with dimensions `dim`
    (outermost first). A 0/1-dimensional `dim` returns `vec` unchanged.
    """
    if len(dim) <= 1:
        return vec
    shaped = vec
    dims = dim
    # Rebuild one level of nesting per pass, innermost dimension first.
    while len(dims) > 1:
        pos = 0
        outer = []
        for _ in range(reduce(mul, dims[0:-1])):
            inner = []
            for _ in range(dims[-1]):
                inner.append(shaped[pos])
                pos += 1
            outer.append(inner)
        shaped = outer
        dims = dims[0:-1]
    return shaped
Reshape a vector to a multidimensional array with dimensions dim .
46,053
def validate_ENUM(in_value, restriction):
    """Check that a value (or every element of a 1-D list, recursively)
    is contained in `restriction`; raise ValidationError otherwise.

    Tuple elements are unwrapped to their second member before checking.
    """
    candidate = _get_val(in_value)
    if type(candidate) is list:
        for element in candidate:
            if type(element) is tuple:
                element = element[1]
            validate_ENUM(element, restriction)
    elif candidate not in restriction:
        raise ValidationError("ENUM : %s" % (restriction))
Test to ensure that the given value is contained in the provided list . the value parameter must be either a single value or a 1 - dimensional list . All the values in this list must satisfy the ENUM
46,054
def validate_NUMPLACES(in_value, restriction):
    """Check that a value (or every element of a 1-D list, recursively)
    has exactly `restriction` decimal places; raise ValidationError otherwise.
    """
    # A list restriction carries the required count as its first element.
    if type(restriction) is list:
        restriction = restriction[0]
    checked = _get_val(in_value)
    if type(checked) is list:
        for element in checked:
            if type(element) is tuple:
                element = element[1]
            validate_NUMPLACES(element, restriction)
    else:
        required_places = int(restriction)
        # The Decimal exponent is negative for digits after the point.
        actual_places = Decimal(str(checked)).as_tuple().exponent * -1
        if required_places != actual_places:
            raise ValidationError("NUMPLACES: %s" % (required_places))
the value parameter must be either a single value or a 1 - dimensional list . All the values in this list must satisfy the condition
46,055
def validate_EQUALTIMESTEPS(value, restriction):
    """Ensure the timesteps in a timeseries are evenly spaced.

    If `restriction` (a pandas frequency string) is given, the inferred
    frequency must match it. Raises ValidationError otherwise.
    """
    if len(value) == 0:
        return
    if type(value) == pd.DataFrame:
        # Seasonal series use placeholder year 9999; map to 1900 so pandas
        # can parse the dates.
        if str(value.index[0]).startswith('9999'):
            tmp_val = value.to_json().replace('9999', '1900')
            value = pd.read_json(tmp_val)
    if type(value.index) == pd.Int64Index:
        # Integer-indexed series: every consecutive gap must equal the first.
        # NOTE(review): a single-row frame raises IndexError here -- unchanged
        # from the original; confirm callers never pass one.
        timesteps = list(value.index)
        timestep = timesteps[1] - timesteps[0]
        # Bug fix: the original enumerated timesteps[1:] but indexed
        # timesteps[i] - timesteps[i-1], so at i=0 it compared the first
        # element with the LAST (negative index) rather than consecutive ones.
        for i in range(1, len(timesteps)):
            if timesteps[i] - timesteps[i - 1] != timestep:
                raise ValidationError("Timesteps not equal: %s" % (list(value.index)))
    # NOTE(review): the checks below also run for integer indexes, which do
    # not expose 'inferred_freq' -- looks like they always raise in that
    # case; confirm intended behaviour before restructuring.
    if not hasattr(value.index, 'inferred_freq'):
        raise ValidationError("Timesteps not equal: %s" % (list(value.index),))
    if restriction is None:
        if value.index.inferred_freq is None:
            raise ValidationError("Timesteps not equal: %s" % (list(value.index),))
    else:
        if value.index.inferred_freq != restriction:
            raise ValidationError("Timesteps not equal: %s" % (list(value.index),))
Ensure that the timesteps in a timeseries are equal . If a restriction is provided they must be equal to the specified restriction .
46,056
def flatten_dict(value, target_depth=1, depth=None):
    """Flatten a nested dict down to `target_depth` levels, concatenating
    nested keys with '_' (e.g. {'a': {'b': 1}} -> {'a_b': 1}).

    `depth` is the current nesting depth; computed via count_levels()
    when not supplied.
    """
    if target_depth is None:
        target_depth = 1
    values = list(value.values())
    if len(values) == 0:
        return {}
    if depth is None:
        depth = count_levels(value)
    if isinstance(values[0], dict) and len(values[0]) > 0:
        # Bug fix: the original guard read
        #     if not isinstance(subval, dict) != 'object': return value
        # -- a bool compared to the string 'object', which is always True,
        # so the function returned its input unflattened in every case.
        # The depth comparison below is the intended stopping condition.
        if target_depth >= depth:
            return value
        flatval = {}
        for k in value.keys():
            subval = flatten_dict(value[k], target_depth, depth - 1)
            for k1 in subval.keys():
                flatval[str(k) + "_" + str(k1)] = subval[k1]
        return flatval
    else:
        # Values are not dicts (or are empty dicts): nothing to flatten.
        return value
Take a hashtable with multiple nested dicts and return a dict where the keys are a concatenation of each sub - key .
46,057
def to_named_tuple(keys, values):
    """Build a named tuple ('DBObject') from parallel sequences of field
    names and values.

    Bug fix: the original body referenced an undefined name `dbobject`
    and ignored both parameters, so every call raised NameError.
    """
    tuple_object = namedtuple('DBObject', keys)
    return tuple_object._make(values)
Build a named tuple from parallel sequences of keys and values.
46,058
def get_val(dataset, timestamp=None):
    """Turn a dataset's string value into an appropriate Python value:
    a list (array), string (descriptor), Decimal (scalar) or pandas
    DataFrame / extracted values (timeseries).

    For timeseries, `timestamp` (a single value or list) selects rows via
    forward-fill reindexing; None returns the whole DataFrame.
    """
    if dataset.type == 'array':
        return json.loads(dataset.value)
    elif dataset.type == 'descriptor':
        return str(dataset.value)
    elif dataset.type == 'scalar':
        return Decimal(str(dataset.value))
    elif dataset.type == 'timeseries':
        val = dataset.value
        # Seasonal series store a placeholder year (default '9999'); swap it
        # for a real parseable year (default '1678') before reading.
        seasonal_year = config.get('DEFAULT', 'seasonal_year', '1678')
        seasonal_key = config.get('DEFAULT', 'seasonal_key', '9999')
        val = dataset.value.replace(seasonal_key, seasonal_year)
        timeseries = pd.read_json(val, convert_axes=True)
        if timestamp is None:
            return timeseries
        else:
            try:
                idx = timeseries.index
                if type(idx) == pd.DatetimeIndex:
                    # For a purely seasonal series, rewrite the requested
                    # timestamps into the placeholder year so they match.
                    if set(idx.year) == set([int(seasonal_year)]):
                        if isinstance(timestamp, list):
                            seasonal_timestamp = []
                            for t in timestamp:
                                t_1900 = t.replace(year=int(seasonal_year))
                                seasonal_timestamp.append(t_1900)
                            timestamp = seasonal_timestamp
                        else:
                            timestamp = [timestamp.replace(year=int(seasonal_year))]
                # Forward-fill so timestamps between data points take the
                # preceding value.
                pandas_ts = timeseries.reindex(timestamp, method='ffill')
                # All requested timestamps precede the data: nothing to return.
                if len(pandas_ts.dropna()) == 0:
                    return None
                pandas_ts = pandas_ts.where(pandas_ts.notnull(), None)
                # Multiple columns mean each row is itself an array value.
                val_is_array = False
                if len(pandas_ts.columns) > 1:
                    val_is_array = True
                if val_is_array:
                    if type(timestamp) is list and len(timestamp) == 1:
                        ret_val = pandas_ts.loc[timestamp[0]].values.tolist()
                    else:
                        ret_val = pandas_ts.loc[timestamp].values.tolist()
                else:
                    col_name = pandas_ts.loc[timestamp].columns[0]
                    if type(timestamp) is list and len(timestamp) == 1:
                        ret_val = pandas_ts.loc[timestamp[0]].loc[col_name]
                    else:
                        ret_val = pandas_ts.loc[timestamp][col_name].values.tolist()
                return ret_val
            except Exception as e:
                # On any lookup failure, log and fall through (returns None).
                log.critical("Unable to retrive data. Check timestamps.")
                log.critical(e)
Turn the string value of a dataset into an appropriate value be it a decimal value array or time series .
46,059
def get_layout_as_string(layout):
    """Return a layout as a JSON string.

    Dicts are json-dumped. Strings are json-decoded repeatedly (to undo
    multiple rounds of encoding) until a dict or non-JSON text is reached;
    non-JSON text is returned as-is. Other types return None.
    """
    if isinstance(layout, dict):
        return json.dumps(layout)
    if not isinstance(layout, six.string_types):
        return None
    try:
        # Peel one layer of JSON encoding and recurse.
        return get_layout_as_string(json.loads(layout))
    except:
        # Not valid JSON: treat as a plain layout string.
        return layout
Take a dict or string and return a string . The dict will be json dumped . The string will json parsed to check for json validity . In order to deal with strings which have been json encoded multiple times keep json decoding until a dict is retrieved or until a non - json structure is identified .
46,060
def get_layout_as_dict(layout):
    """Return a layout as a dict when the data is JSON-encoded.

    Strings are json-decoded repeatedly (to undo multiple rounds of
    encoding) until a dict or non-JSON text is reached; non-JSON text is
    returned as-is. Other types return None.
    """
    if isinstance(layout, dict):
        return layout
    if not isinstance(layout, six.string_types):
        return None
    try:
        # Peel one layer of JSON encoding and recurse.
        return get_layout_as_dict(json.loads(layout))
    except:
        # Not valid JSON: return the raw string.
        return layout
Take a dict or string and return a dict if the data is json - encoded . The string will json parsed to check for json validity . In order to deal with strings which have been json encoded multiple times keep json decoding until a dict is retrieved or until a non - json structure is identified .
46,061
def get_username(uid, **kwargs):
    """Return the username for the given user id.

    Raises ResourceNotFoundError if no such user exists.

    Bug fix: the original used .one(), which raises NoResultFound rather
    than returning None, so the None-check was dead code and a raw
    NoResultFound leaked to callers instead of ResourceNotFoundError.
    """
    rs = db.DBSession.query(User.username).filter(User.id == uid).first()
    if rs is None:
        raise ResourceNotFoundError("User with ID %s not found" % uid)
    return rs.username
Return the username of a given user_id
46,062
def get_usernames_like(username, **kwargs):
    """Return all usernames containing the given substring."""
    pattern = "%%%s%%" % username
    matches = db.DBSession.query(User.username).filter(
        User.username.like(pattern)).all()
    return [match.username for match in matches]
Return a list of usernames like the given string .
46,063
def update_user_display_name(user, **kwargs):
    """Update a user's display name; raises ResourceNotFoundError when the
    user does not exist."""
    try:
        user_i = db.DBSession.query(User).filter(User.id == user.id).one()
    except NoResultFound:
        raise ResourceNotFoundError("User (id=%s) not found" % (user.id))
    user_i.display_name = user.display_name
    return user_i
Update a user's display name.
46,064
def update_user_password(new_pwd_user_id, new_password, **kwargs):
    """Update a user's password (bcrypt-hashed); raises
    ResourceNotFoundError when the user does not exist."""
    try:
        user_i = db.DBSession.query(User).filter(User.id == new_pwd_user_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("User (id=%s) not found" % (new_pwd_user_id))
    user_i.password = bcrypt.hashpw(str(new_password).encode('utf-8'), bcrypt.gensalt())
    return user_i
Update a user's password.
46,065
def get_user(uid, **kwargs):
    """Fetch a user by id; defaults to the calling user when uid is None."""
    requested_id = uid if uid is not None else kwargs.get('user_id')
    return _get_user(requested_id)
Get a user by ID
46,066
def add_role(role, **kwargs):
    """Create and persist a new role."""
    new_role = Role(name=role.name, code=role.code)
    db.DBSession.add(new_role)
    db.DBSession.flush()
    return new_role
Add a new role
46,067
def add_perm(perm, **kwargs):
    """Create and persist a new permission."""
    new_perm = Perm(name=perm.name, code=perm.code)
    db.DBSession.add(new_perm)
    db.DBSession.flush()
    return new_perm
Add a permission
46,068
def delete_perm(perm_id, **kwargs):
    """Delete a permission; raises ResourceNotFoundError when it does not
    exist. Returns 'OK'."""
    try:
        # NoResultFound subclasses InvalidRequestError, so a missing row is
        # caught below.
        doomed = db.DBSession.query(Perm).filter(Perm.id == perm_id).one()
        db.DBSession.delete(doomed)
    except InvalidRequestError:
        raise ResourceNotFoundError("Permission (id=%s) does not exist" % (perm_id))
    return 'OK'
Delete a permission
46,069
def set_user_role(new_user_id, role_id, **kwargs):
    """Grant role_id to new_user_id; raises ResourceNotFoundError if either
    does not exist. Returns the role."""
    try:
        _get_user(new_user_id)
        role_i = _get_role(role_id)
        role_i.roleusers.append(RoleUser(user_id=new_user_id, role_id=role_id))
        db.DBSession.flush()
    except Exception as e:
        log.exception(e)
        raise ResourceNotFoundError("User or Role does not exist")
    return role_i
Apply role_id to new_user_id
46,070
def delete_user_role(deleted_user_id, role_id, **kwargs):
    """Revoke role_id from deleted_user_id; raises ResourceNotFoundError
    when the grant does not exist. Returns 'OK'."""
    try:
        _get_user(deleted_user_id)
        _get_role(role_id)
        grant = db.DBSession.query(RoleUser).filter(
            RoleUser.user_id == deleted_user_id,
            RoleUser.role_id == role_id).one()
        db.DBSession.delete(grant)
    except NoResultFound:
        raise ResourceNotFoundError("User Role does not exist")
    return 'OK'
Remove a user from a role
46,071
def set_role_perm(role_id, perm_id, **kwargs):
    """Attach a permission to a role; returns the role."""
    _get_perm(perm_id)
    role_i = _get_role(role_id)
    role_i.roleperms.append(RolePerm(role_id=role_id, perm_id=perm_id))
    db.DBSession.flush()
    return role_i
Insert a permission into a role
46,072
def delete_role_perm(role_id, perm_id, **kwargs):
    """Detach a permission from a role; raises ResourceNotFoundError when
    the association does not exist. Returns 'OK'."""
    _get_perm(perm_id)
    _get_role(role_id)
    try:
        association = db.DBSession.query(RolePerm).filter(
            RolePerm.role_id == role_id,
            RolePerm.perm_id == perm_id).one()
        db.DBSession.delete(association)
    except NoResultFound:
        raise ResourceNotFoundError("Role Perm does not exist")
    return 'OK'
Remove a permission from a role
46,073
def update_role(role, **kwargs):
    """Update a role's name/code and attach the given permissions and users.

    Bug fix: the user loop previously built
    RoleUser(user_id=user.id, perm_id=perm.id) -- a keyword RoleUser does
    not take, and it leaked `perm` from the permissions loop (NameError
    whenever role.permissions was empty). It now correctly links the user
    to this role.
    """
    try:
        role_i = db.DBSession.query(Role).filter(Role.id == role.id).one()
        role_i.name = role.name
        role_i.code = role.code
    except NoResultFound:
        raise ResourceNotFoundError("Role (role_id=%s) does not exist" % (role.id))
    for perm in role.permissions:
        _get_perm(perm.id)
        db.DBSession.add(RolePerm(role_id=role.id, perm_id=perm.id))
    for user in role.users:
        _get_user(user.id)
        db.DBSession.add(RoleUser(user_id=user.id, role_id=role.id))
    db.DBSession.flush()
    return role_i
Update the role . Used to add permissions and users to a role .
46,074
def get_all_users(**kwargs):
    """Get all users, optionally filtered.

    kwargs may contain filter_type ('id' or 'username') and filter_value
    (a single value, a comma-separated string, or a list of values).

    SECURITY fix: the original passed the incoming id filter string to
    eval(), allowing arbitrary code execution; ast.literal_eval only
    accepts Python literals (ints, lists, ...).
    """
    users_qry = db.DBSession.query(User)
    filter_type = kwargs.get('filter_type')
    filter_value = kwargs.get('filter_value')
    if filter_type is None:
        log.info('[HB.users] Getting All Users')
        return users_qry.all()
    if filter_type == "id":
        if isinstance(filter_value, str):
            log.info("[HB.users] Getting user by Filter ID : %s", filter_value)
            import ast
            filter_value = ast.literal_eval(filter_value)
        if type(filter_value) is int:
            users_qry = users_qry.filter(User.id == filter_value)
        else:
            users_qry = users_qry.filter(User.id.in_(filter_value))
    elif filter_type == "username":
        if isinstance(filter_value, str):
            log.info("[HB.users] Getting user by Filter Username : %s", filter_value)
            # A comma-separated string becomes a list of stripped names.
            names = []
            for em in filter_value.split(","):
                log.info("[HB.users] >>> Getting user by single Username : %s", em)
                names.append(em.strip())
            filter_value = names
        users_qry = users_qry.filter(User.username.in_(filter_value))
    else:
        raise Exception("Filter type '{}' not allowed".format(filter_type))
    return users_qry.all()
Get the username & ID of all users . Use the the filter if it has been provided The filter has to be a list of values
46,075
def get_role(role_id, **kwargs):
    """Return a role by id; raises HydraError when not found."""
    try:
        return db.DBSession.query(Role).filter(Role.id == role_id).one()
    except NoResultFound:
        raise HydraError("Role not found (role_id={})".format(role_id))
Get a role by its ID .
46,076
def get_role_by_code(role_code, **kwargs):
    """Return a role by its code; raises ResourceNotFoundError when not found."""
    try:
        return db.DBSession.query(Role).filter(Role.code == role_code).one()
    except NoResultFound:
        raise ResourceNotFoundError("Role not found (role_code={})".format(role_code))
Get a role by its code
46,077
def get_perm(perm_id, **kwargs):
    """Return a permission by id; raises ResourceNotFoundError when not found."""
    try:
        return db.DBSession.query(Perm).filter(Perm.id == perm_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Permission not found (perm_id={})".format(perm_id))
Get a permission by its ID.
46,078
def get_perm_by_code(perm_code, **kwargs):
    """Return a permission by its code; raises ResourceNotFoundError when
    not found."""
    try:
        return db.DBSession.query(Perm).filter(Perm.code == perm_code).one()
    except NoResultFound:
        raise ResourceNotFoundError("Permission not found (perm_code={})".format(perm_code))
Get a permission by its code
46,079
def _create_dataframe(cls, value):
    """Build a pandas DataFrame from a JSON string of the form
    {col: [values]} or {col: {index: value}}.

    Column order is preserved via OrderedDict. Raises HydraError when the
    JSON is empty/invalid; falls back to pd.read_json on AssertionError.
    """
    try:
        # Preserve column ordering as written in the JSON.
        ordered_jo = json.loads(six.text_type(value), object_pairs_hook=collections.OrderedDict)
        cols = list(ordered_jo.keys())
        if len(cols) == 0:
            raise ValueError("Dataframe has no columns")
        # The first column determines the index: positional for lists,
        # the sub-dict's keys otherwise.
        if isinstance(ordered_jo[cols[0]], list):
            index = range(len(ordered_jo[cols[0]]))
        else:
            index = list(ordered_jo[cols[0]].keys())
        data = []
        for c in cols:
            if isinstance(ordered_jo[c], list):
                data.append(ordered_jo[c])
            else:
                data.append(list(ordered_jo[c].values()))
        # Ragged data cannot become a rectangular ndarray.
        try:
            np_data = np.array(data)
        except ValueError:
            np_data = None
        # Data is built column-major, so construct transposed then flip.
        if np_data is not None and np_data.shape == (len(cols), len(index)):
            df = pd.DataFrame(np_data, columns=index, index=cols).transpose()
        else:
            df = pd.DataFrame(data, columns=index, index=cols).transpose()
    except ValueError as e:
        raise HydraError(str(e))
    except AssertionError as e:
        log.warning("An error occurred creating the new data frame: %s. Defaulting to a simple read_json" % (e))
        df = pd.read_json(value).fillna(0)
    return df
Builds a dataframe from the value
46,080
def parse_value(self):
    """Turn this dataset's raw value into a hydra-friendly value via
    HydraObjectFactory.

    Returns None when no value is set, the string "NULL" for null/empty
    values, and raises HydraError (with the original cause) on any
    parsing failure.
    """
    try:
        if self.value is None:
            log.warning("Cannot parse dataset. No value specified.")
            return None
        data = six.text_type(self.value)
        if data.upper().strip() in ("NULL", ""):
            return "NULL"
        # Truncate only the copy used for logging, not the parsed value.
        data = data[0:100]
        log.info("[Dataset.parse_value] Parsing %s (%s)", data, type(data))
        return HydraObjectFactory.valueFromDataset(self.type, self.value, self.get_metadata_as_dict())
    except Exception as e:
        log.exception(e)
        raise HydraError("Error parsing value %s: %s" % (self.value, e))
Turn the value of an incoming dataset into a hydra - friendly value .
46,081
def get_metadata_as_dict(self, user_id=None, source=None):
    """Return this dataset's metadata as {str: str}.

    Accepts metadata stored either as a dict or a JSON string. When given,
    user_id / source are injected unless a (case-insensitive) key of the
    same name is already present.
    """
    if self.metadata is None or self.metadata == "":
        return {}
    if isinstance(self.metadata, dict):
        meta = self.metadata
    else:
        meta = json.loads(self.metadata)
    lowered_keys = [key.lower() for key in meta]
    if user_id is not None and 'user_id' not in lowered_keys:
        meta['user_id'] = six.text_type(user_id)
    if source is not None and 'source' not in lowered_keys:
        meta['source'] = six.text_type(source)
    return {key: six.text_type(val) for key, val in meta.items()}
Convert a metadata json string into a dictionary .
46,082
def delete_resourcegroup(group_id, **kwargs):
    """Delete a resource group from the DB; returns 'OK'."""
    doomed_group = _get_group(group_id)
    db.DBSession.delete(doomed_group)
    db.DBSession.flush()
    return 'OK'
Delete a resource group.
46,083
def _is_admin(user_id):
    """Return True when the specified user holds the admin role."""
    user = get_session().query(User).filter(User.id == user_id).one()
    return user.is_admin()
Is the specified user an admin
46,084
def set_metadata(self, metadata_dict):
    """Synchronise this dataset's metadata rows with `metadata_dict`:
    update changed values, add new keys, delete keys no longer present.

    A None dict is a no-op (existing metadata is left untouched).
    """
    if metadata_dict is None:
        return
    existing_metadata = []
    # Update values for keys that already exist.
    for m in self.metadata:
        existing_metadata.append(m.key)
        if m.key in metadata_dict:
            if m.value != metadata_dict[m.key]:
                m.value = metadata_dict[m.key]
    # Add rows for brand-new keys.
    for k, v in metadata_dict.items():
        if k not in existing_metadata:
            m_i = Metadata(key=str(k), value=str(v))
            self.metadata.append(m_i)
    # Delete rows whose keys were removed from the incoming dict.
    metadata_to_delete = set(existing_metadata).difference(set(metadata_dict.keys()))
    for m in self.metadata:
        if m.key in metadata_to_delete:
            get_session().delete(m)
Set the metadata on a dataset
46,085
def check_user(self, user_id):
    """Return True if the given user may read this dataset.

    Non-hidden datasets are readable by everyone; hidden datasets require
    an owner entry with view == 'Y'.
    """
    if self.hidden == 'N':
        return True
    uid = int(user_id)
    for owner in self.owners:
        if int(owner.user_id) == uid and owner.view == 'Y':
            return True
    return False
Check whether this user can read this dataset
46,086
def get_network(self):
    """Return the network this resource belongs to.

    NETWORK references return themselves; NODE/LINK/GROUP return their
    parent network; PROJECT (and unrecognised keys) return None.
    """
    key = self.ref_key
    if key == 'NETWORK':
        return self.network
    if key == 'NODE':
        return self.node.network
    if key == 'LINK':
        return self.link.network
    if key == 'GROUP':
        return self.group.network
    if key == 'PROJECT':
        return None
Get the network that this resource attribute is in .
46,087
def check_read_permission(self, user_id, do_raise=True):
    """Delegate the read-permission check to the owning resource."""
    resource = self.get_resource()
    return resource.check_read_permission(user_id, do_raise=do_raise)
Check whether this user can read this resource attribute
46,088
def check_write_permission(self, user_id, do_raise=True):
    """Delegate the write-permission check to the owning resource."""
    resource = self.get_resource()
    return resource.check_write_permission(user_id, do_raise=do_raise)
Check whether this user can write this node
46,089
def add_link(self, name, desc, layout, node_1, node_2):
    """Add a link between two existing nodes of this network.

    Raises HydraError when a link of the same name already exists in the
    network. Returns the new link.
    """
    duplicate = get_session().query(Link).filter(
        Link.name == name, Link.network_id == self.id).first()
    if duplicate is not None:
        raise HydraError("A link with name %s is already in network %s" % (name, self.id))
    new_link = Link()
    new_link.name = name
    new_link.description = desc
    new_link.layout = json.dumps(layout) if layout is not None else None
    new_link.node_a = node_1
    new_link.node_b = node_2
    get_session().add(new_link)
    self.links.append(new_link)
    return new_link
Add a link to a network . Links are what effectively define the network topology by associating two already existing nodes .
46,090
def add_node(self, name, desc, layout, node_x, node_y):
    """Add a node at (node_x, node_y) to this network.

    Raises HydraError when a node of the same name already exists in the
    network. Returns the new node.
    """
    duplicate = get_session().query(Node).filter(
        Node.name == name, Node.network_id == self.id).first()
    if duplicate is not None:
        raise HydraError("A node with name %s is already in network %s" % (name, self.id))
    new_node = Node()
    new_node.name = name
    new_node.description = desc
    new_node.layout = str(layout) if layout is not None else None
    new_node.x = node_x
    new_node.y = node_y
    get_session().add(new_node)
    self.nodes.append(new_node)
    return new_node
Add a node to a network .
46,091
def check_read_permission(self, user_id, do_raise=True):
    """Check whether user_id may read this network.

    Admins and the creator always pass. Otherwise an owner entry with
    view == 'Y' is required; failing that, either raise PermissionError
    (do_raise=True) or return False.
    """
    if _is_admin(user_id):
        return True
    if int(self.created_by) == int(user_id):
        return True
    for owner in self.owners:
        if int(owner.user_id) == int(user_id):
            if owner.view == 'Y':
                # Matching owner with view rights found.
                break
    else:
        # for/else: no owner entry granted view access.
        if do_raise is True:
            raise PermissionError("Permission denied. User %s does not have read"
                                  " access on network %s" % (user_id, self.id))
        else:
            return False
    return True
Check whether this user can read this network
46,092
def check_share_permission(self, user_id):
    """Check whether user_id may share this network.

    Admins and the creator always pass. Otherwise an owner entry with both
    view == 'Y' and share == 'Y' is required; failing that, raise
    PermissionError.
    """
    if _is_admin(user_id):
        return
    if int(self.created_by) == int(user_id):
        return
    for owner in self.owners:
        if owner.user_id == int(user_id):
            if owner.view == 'Y' and owner.share == 'Y':
                # Matching owner with share rights found.
                break
    else:
        # for/else: no owner entry granted share access.
        raise PermissionError("Permission denied. User %s does not have share"
                              " access on network %s" % (user_id, self.id))
Check whether this user can share this network.
46,093
def check_read_permission(self, user_id, do_raise=True):
    """Read access on this resource is governed by its network."""
    parent_network = self.network
    return parent_network.check_read_permission(user_id, do_raise=do_raise)
Check whether this user can read this link
46,094
def check_write_permission(self, user_id, do_raise=True):
    """Write access on this resource is governed by its network."""
    parent_network = self.network
    return parent_network.check_write_permission(user_id, do_raise=do_raise)
Check whether this user can write this link
46,095
def get_items(self, scenario_id):
    """Return all items of this group within the given scenario."""
    return get_session().query(ResourceGroupItem).filter(
        ResourceGroupItem.group_id == self.id).filter(
        ResourceGroupItem.scenario_id == scenario_id).all()
Get all the items in this group in the given scenario
46,096
def set_ref(self, ref_key, ref_id):
    """Point this object at the resource identified by ref_key/ref_id,
    setting the matching *_id attribute.

    Raises HydraError for an unrecognised ref_key.
    """
    id_attr_by_key = {
        'NETWORK': 'network_id',
        'NODE': 'node_id',
        'LINK': 'link_id',
        'GROUP': 'group_id',
        'SCENARIO': 'scenario_id',
        'PROJECT': 'project_id',
    }
    if ref_key not in id_attr_by_key:
        raise HydraError("Ref Key %s not recognised." % ref_key)
    setattr(self, id_attr_by_key[ref_key], ref_id)
Using a ref key and ref id set the reference to the appropriate resource type .
46,097
def roles(self):
    """Return the set of all roles granted to this user."""
    return set(ur.role for ur in self.roleusers)
Return a set with all roles granted to the user .
46,098
def is_admin(self):
    """Return True when any of this user's roles has the code 'admin'."""
    return any(ur.role.code == 'admin' for ur in self.roleusers)
Check that the user has a role with the code admin
46,099
def _check_dimension(typeattr, unit_id=None):
    """Check that the unit and dimension on a type attribute agree.

    A unit may be passed explicitly to check it against the attribute's
    dimension; otherwise the typeattr's own unit is used. Raises HydraError
    when the attribute has no dimension but the unit does, or when the two
    dimensions differ.
    """
    if unit_id is None:
        unit_id = typeattr.unit_id
    dimension_id = _get_attr(typeattr.attr_id).dimension_id
    if unit_id is not None and dimension_id is None:
        # A unit always implies a dimension; the attribute must carry one too.
        unit_dimension_id = units.get_dimension_by_unit_id(unit_id).id
        raise HydraError("Unit %s (abbreviation=%s) has dimension_id %s(name=%s), but attribute has no dimension" % (
            unit_id,
            units.get_unit(unit_id).abbreviation,
            unit_dimension_id,
            units.get_dimension(unit_dimension_id, do_accept_dimension_id_none=True).name))
    elif unit_id is not None and dimension_id is not None:
        unit_dimension_id = units.get_dimension_by_unit_id(unit_id).id
        if unit_dimension_id != dimension_id:
            raise HydraError("Unit %s (abbreviation=%s) has dimension_id %s(name=%s), but attribute has dimension_id %s(name=%s)" % (
                unit_id,
                units.get_unit(unit_id).abbreviation,
                unit_dimension_id,
                units.get_dimension(unit_dimension_id, do_accept_dimension_id_none=True).name,
                dimension_id,
                units.get_dimension(dimension_id, do_accept_dimension_id_none=True).name))
Check that the unit and dimension on a type attribute match . Alternatively pass in a unit manually to check against the dimension of the type attribute