idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
def list_resources(self, lang):
    """Return a sequence of resources for a given lang."""
    path = '/api/2/project/%s/resources/' % (self.get_project_slug(lang),)
    return registry.registry.http_handler.get(path)
def resources(self, lang, slug):
    """Generate a list of Resources in the Project."""
    return resources.Resource.get(
        project_slug=self.get_project_slug(lang),
        slug=slug,
    )
def resource_exists(self, slug, locale, project_slug=None):
    """Return the Resource with the given slug in locale, or None if absent."""
    try:
        return resources.Resource.get(
            project_slug=project_slug or self.get_project_slug(locale),
            slug=slug,
        )
    except NotFoundError:
        # Missing resource is an expected outcome, not an error.
        return None
def get_event_list(config):
    """Get a dict of supported events from device."""
    eventinstances = session_request(
        config.session.post,
        device_event_url.format(
            proto=config.web_proto, host=config.host, port=config.port),
        auth=config.session.auth,
        headers=headers,
        data=request_xml)

    raw_event_list = _prepare_event(eventinstances)

    event_list = {}
    for entry in MAP + METAMAP:
        # Walk the nested dict following the entry's base path; skip
        # entries whose path is not present in the raw event data.
        instance = raw_event_list
        try:
            for item in sum(entry[MAP_BASE].values(), []):
                instance = instance[item]
        except KeyError:
            continue
        event_list[entry[MAP_TYPE]] = instance
    return event_list
45,904 | def _prepare_event ( eventinstances ) : import xml . etree . ElementTree as ET def parse_event ( events ) : def clean_attrib ( attrib = { } ) : attributes = { } for key , value in attrib . items ( ) : attributes [ key . split ( '}' ) [ - 1 ] ] = value return attributes description = { } for child in events : child_tag = child . tag . split ( '}' ) [ - 1 ] child_attrib = clean_attrib ( child . attrib ) if child_tag != 'MessageInstance' : description [ child_tag ] = { ** child_attrib , ** parse_event ( child ) } elif child_tag == 'MessageInstance' : description = { } for item in child : tag = item . tag . split ( '}' ) [ - 1 ] description [ tag ] = clean_attrib ( item [ 0 ] . attrib ) return description root = ET . fromstring ( eventinstances ) return parse_event ( root [ 0 ] [ 0 ] [ 0 ] ) | Converts event instances to a relevant dictionary . |
def url(self):
    """Represent device base url."""
    return URL.format(http=self.web_proto, host=self.host, port=self.port)
def process_raw(self, raw: dict) -> None:
    """Pre-process raw dict, grouping parameters by port index."""
    raw_ports = {}
    for param in raw:
        port_index = REGEX_PORT_INDEX.search(param).group(0)
        # Drop the 'IOPORT.I<n>.' prefix, keeping the parameter name only.
        name = param.replace(IOPORT + '.I' + port_index + '.', '')
        raw_ports.setdefault(port_index, {})[name] = raw[param]
    super().process_raw(raw_ports)
def name(self) -> str:
    """Return name relevant to direction."""
    key = 'Input.Name' if self.direction == DIRECTION_IN else 'Output.Name'
    return self.raw.get(key, '')
def action(self, action):
    """Activate or deactivate an output."""
    if self.direction != DIRECTION_OUT:
        # Only output ports can be actuated.
        return
    # Device ports are 1-based on the wire; quote everything (safe='').
    port_action = quote(
        '{port}:{action}'.format(port=int(self.id) + 1, action=action),
        safe='')
    url = URL + ACTION.format(action=port_action)
    self._request('get', url)
def initialize_params(self, preload_data=True) -> None:
    """Load device parameters and initialize parameter management."""
    params = self.request('get', param_url) if preload_data else ''
    self.params = Params(params, self.request)
def initialize_ports(self) -> None:
    """Load IO port parameters for device."""
    if not self.params:
        # Ports live in the parameter tree; create it without preloading.
        self.initialize_params(preload_data=False)
    self.params.update_ports()
    self.ports = Ports(self.params, self.request)
def initialize_users(self) -> None:
    """Load device user data and initialize user management."""
    raw_users = self.request('get', pwdgrp_url)
    self.users = Users(raw_users, self.request)
def new_event(self, event_data: str) -> None:
    """Process an incoming event."""
    event = self.parse_event_xml(event_data)
    # Only events with a recognised operation are managed.
    if EVENT_OPERATION in event:
        self.manage_event(event)
def parse_event_xml(self, event_data) -> dict:
    """Parse metadata xml into an event dict."""
    event_xml = event_data.decode()

    operation_match = MESSAGE.search(event_xml)
    if not operation_match:
        # No message means nothing to report.
        return {}

    event = {EVENT_OPERATION: operation_match.group(EVENT_OPERATION)}

    topic_match = TOPIC.search(event_xml)
    if topic_match:
        event[EVENT_TOPIC] = topic_match.group(EVENT_TOPIC)

    source_match = SOURCE.search(event_xml)
    if source_match:
        event[EVENT_SOURCE] = source_match.group(EVENT_SOURCE)
        event[EVENT_SOURCE_IDX] = source_match.group(EVENT_SOURCE_IDX)

    data_match = DATA.search(event_xml)
    if data_match:
        event[EVENT_TYPE] = data_match.group(EVENT_TYPE)
        event[EVENT_VALUE] = data_match.group(EVENT_VALUE)

    _LOGGER.debug(event)
    return event
def manage_event(self, event) -> None:
    """Handle received metadata: register new events or update known ones."""
    name = EVENT_NAME.format(
        topic=event[EVENT_TOPIC], source=event.get(EVENT_SOURCE_IDX))

    if event[EVENT_OPERATION] == 'Initialized' and name not in self.events:
        for event_class in EVENT_CLASSES:
            if event_class.TOPIC in event[EVENT_TOPIC]:
                self.events[name] = event_class(event)
                self.signal('add', name)
                return
        # No class matched the topic: fell through the loop above.
        _LOGGER.debug('Unsupported event %s', event[EVENT_TOPIC])
    elif event[EVENT_OPERATION] == 'Changed' and name in self.events:
        self.events[name].state = event[EVENT_VALUE]
def state(self, state: str) -> None:
    """Update state of event and notify all registered callbacks."""
    self._state = state
    for subscriber in self._callbacks:
        subscriber()
def remove_callback(self, callback) -> None:
    """Remove callback if it is registered; no-op otherwise."""
    if callback in self._callbacks:
        self._callbacks.remove(callback)
def enable_events(self, event_callback=None) -> None:
    """Enable events for stream."""
    self.event = EventManager(event_callback)
    self.stream.event = self.event
def update_brand(self) -> None:
    """Update brand group of parameters."""
    self.update(path=URL_GET + GROUP.format(group=BRAND))
def update_ports(self) -> None:
    """Update port groups of parameters."""
    for group in (INPUT, IOPORT, OUTPUT):
        self.update(path=URL_GET + GROUP.format(group=group))
def ports(self) -> dict:
    """Create a smaller dictionary containing all IO-port parameters."""
    return {param: self[param].raw
            for param in self
            if param.startswith(IOPORT)}
def update_properties(self) -> None:
    """Update properties group of parameters."""
    self.update(path=URL_GET + GROUP.format(group=PROPERTIES))
def delete(self, user: str) -> None:
    """Remove user from the device."""
    self._request('post', URL, data={'action': 'remove', 'user': user})
def start(self):
    """Start session."""
    # This object is its own protocol factory (lambda: self).
    conn = self.loop.create_connection(
        lambda: self, self.session.host, self.session.port)
    task = self.loop.create_task(conn)
    task.add_done_callback(self.init_done)
def stop(self):
    """Stop session, sending TEARDOWN if a transport is open."""
    if self.transport:
        self.transport.write(self.method.TEARDOWN().encode())
        self.transport.close()
    self.rtp.stop()
def connection_made(self, transport):
    """Handle successful connection to device: send first message."""
    self.transport = transport
    self.transport.write(self.method.message.encode())
    # Arm a watchdog in case the device never answers.
    self.time_out_handle = self.loop.call_later(TIME_OUT_LIMIT, self.time_out)
def data_received(self, data):
    """Handle a response on the RTSP session."""
    self.time_out_handle.cancel()
    self.session.update(data.decode())

    if self.session.state == STATE_STARTING:
        # Handshake not finished: send the next method and re-arm watchdog.
        self.transport.write(self.method.message.encode())
        self.time_out_handle = self.loop.call_later(
            TIME_OUT_LIMIT, self.time_out)
    elif self.session.state == STATE_PLAYING:
        self.callback(SIGNAL_PLAYING)
        if self.session.session_timeout != 0:
            # Keep the session alive a little before it would expire.
            interval = self.session.session_timeout - 5
            self.loop.call_later(interval, self.keep_alive)
    else:
        self.stop()
def time_out(self):
    """Handle RTSP request timeout: no response arrived in time."""
    _LOGGER.warning('Response timed out %s', self.session.host)
    self.stop()
    self.callback(SIGNAL_FAILED)
def message(self):
    """Return RTSP method message based on the session's current method."""
    msg = self.message_methods[self.session.method]()
    _LOGGER.debug(msg)
    return msg
def OPTIONS(self, authenticate=True):
    """Request options device supports."""
    message = "OPTIONS " + self.session.url + " RTSP/1.0\r\n"
    message += self.sequence
    if authenticate:
        message += self.authentication
    message += self.user_agent
    message += self.session_id
    message += '\r\n'
    return message
def DESCRIBE(self):
    """Request description of what services the RTSP server makes available."""
    return ''.join([
        "DESCRIBE " + self.session.url + " RTSP/1.0\r\n",
        self.sequence,
        self.authentication,
        self.user_agent,
        "Accept: application/sdp\r\n",
        '\r\n',
    ])
def SETUP(self):
    """Set up stream transport."""
    return ''.join([
        "SETUP " + self.session.control_url + " RTSP/1.0\r\n",
        self.sequence,
        self.authentication,
        self.user_agent,
        self.transport,
        '\r\n',
    ])
def PLAY(self):
    """Signal that the RTSP session is ready to send data."""
    return ''.join([
        "PLAY " + self.session.url + " RTSP/1.0\r\n",
        self.sequence,
        self.authentication,
        self.user_agent,
        self.session_id,
        '\r\n',
    ])
def authentication(self):
    """Generate authentication string, preferring digest over basic."""
    if self.session.digest:
        credentials = self.session.generate_digest()
    elif self.session.basic:
        credentials = self.session.generate_basic()
    else:
        # No authentication negotiated yet.
        return ''
    return "Authorization: " + credentials + '\r\n'
def transport(self):
    """Generate transport string."""
    template = "Transport: RTP/AVP;unicast;client_port={}-{}\r\n"
    return template.format(str(self.session.rtp_port),
                           str(self.session.rtcp_port))
def state(self):
    """Return which state the session is in, derived from the current method."""
    if self.method in ['OPTIONS', 'DESCRIBE', 'SETUP', 'PLAY']:
        session_state = STATE_STARTING
    elif self.method in ['KEEP-ALIVE']:
        session_state = STATE_PLAYING
    else:
        session_state = STATE_STOPPED
    _LOGGER.debug('RTSP session (%s) state %s', self.host, session_state)
    return session_state
def stream_url(self):
    """Build url for stream."""
    rtsp_url = RTSP_URL.format(
        host=self.config.host,
        video=self.video_query,
        audio=self.audio_query,
        event=self.event_query)
    _LOGGER.debug(rtsp_url)
    return rtsp_url
def session_callback(self, signal):
    """Handle signalling from stream session."""
    if signal == SIGNAL_DATA:
        self.event.new_event(self.data)
    elif signal == SIGNAL_FAILED:
        self.retry()
    # Forward connection-state transitions to the optional status callback.
    if signal in [SIGNAL_PLAYING, SIGNAL_FAILED] and self.connection_status_callback:
        self.connection_status_callback(signal)
def start(self):
    """Start stream unless one is already running."""
    if not self.stream or self.stream.session.state == STATE_STOPPED:
        self.stream = RTSPClient(
            self.config.loop,
            self.stream_url,
            self.config.host,
            self.config.username,
            self.config.password,
            self.session_callback)
        self.stream.start()
def stop(self):
    """Stop stream if one is running."""
    if self.stream and self.stream.session.state != STATE_STOPPED:
        self.stream.stop()
def retry(self):
    """Retry connection to device after RETRY_TIMER seconds."""
    self.stream = None
    self.config.loop.call_later(RETRY_TIMER, self.start)
    _LOGGER.debug('Reconnecting to %s', self.config.host)
def check_perm(user_id, permission_code):
    """Check whether a user has permission to perform an action.

    The permission_code parameter should be a permission contained in tPerm.
    Raises PermissionError if the permission does not exist or the user
    does not hold it.
    """
    try:
        perm = db.DBSession.query(Perm).filter(
            Perm.code == permission_code).one()
    except NoResultFound:
        raise PermissionError(
            "Nonexistent permission type: %s" % (permission_code))

    try:
        res = db.DBSession.query(User).join(
            RoleUser, RoleUser.user_id == User.id
        ).join(
            Perm, Perm.id == perm.id
        ).join(
            RolePerm, RolePerm.perm_id == Perm.id
        ).filter(User.id == user_id).one()
    except NoResultFound:
        raise PermissionError(
            "Permission denied. User %s does not have permission %s"
            % (user_id, permission_code))
def required_perms(*req_perms):
    """Decorator requiring the caller to possess every permission in req_perms.

    Raises PermissionError (via check_perm) if any permission is missing
    from the user identified by the ``user_id`` keyword argument.
    """
    def decorator(fn):
        @wraps(fn)
        def checked(*args, **kwargs):
            user_id = kwargs.get("user_id")
            for perm in req_perms:
                check_perm(user_id, perm)
            return fn(*args, **kwargs)
        return checked
    return decorator
def required_role(req_role):
    """Decorator requiring the caller to possess the specified role.

    Raises PermissionError if no matching RoleUser row exists for the
    ``user_id`` keyword argument.
    """
    def decorator(fn):
        @wraps(fn)
        def checked(*args, **kwargs):
            user_id = kwargs.get("user_id")
            try:
                res = db.DBSession.query(RoleUser).filter(
                    RoleUser.user_id == user_id
                ).join(Role, Role.code == req_role).one()
            except NoResultFound:
                raise PermissionError(
                    "Permission denied. User %s does not have role %s"
                    % (user_id, req_role))
            return fn(*args, **kwargs)
        return checked
    return decorator
def get_time_period(period_name):
    """Given a time period name, fetch the hydra-compatible time abbreviation."""
    abbreviation = time_map.get(period_name.lower())
    if abbreviation is None:
        raise Exception(
            "Symbol %s not recognised as a time period" % period_name)
    return abbreviation
def get_datetime(timestamp):
    """Turn a string timestamp into a datetime.

    First tries dateutil's parser. Failing that, it tries to guess the
    time format and converts the string manually with strptime.

    :param timestamp: timestamp string (or a datetime, returned as-is).
    :raises ValueError: if the timestamp is a bare number (ambiguous).
    """
    # Reject purely numeric values: they cannot be parsed unambiguously.
    try:
        float(timestamp)
    except (ValueError, TypeError):
        pass
    else:
        raise ValueError("Timestamp %s is a float" % (timestamp,))

    try:
        parsed_dt = parse(timestamp, dayfirst=False)
        # Normalise to a naive datetime.
        if parsed_dt.tzinfo is None:
            return parsed_dt
        return parsed_dt.replace(tzinfo=None)
    except Exception:
        # dateutil could not parse it; fall through to manual parsing.
        pass

    if isinstance(timestamp, datetime):
        return timestamp

    fmt = guess_timefmt(timestamp)
    if fmt is None:
        fmt = FORMAT

    try:
        return datetime.strptime(timestamp, fmt)
    except ValueError as e:
        # BUG FIX: Exception.message was removed in Python 3; use str(e).
        msg = str(e)
        if msg.split(' ', 1)[0].strip() == 'unconverted':
            # strptime left a trailing UTC offset such as '+0100':
            # strip it, re-parse, then apply the offset manually.
            utcoffset = msg.split()[3].strip()
            timestamp = timestamp.replace(utcoffset, '')
            ts_time = datetime.strptime(timestamp, fmt)
            tzoffset = timedelta(hours=int(utcoffset[0:3]),
                                 minutes=int(utcoffset[3:5]))
            return ts_time - tzoffset
        raise
def timestamp_to_ordinal(timestamp):
    """Convert a soap-interface timestamp to the DB's ordinal time format."""
    if timestamp is None:
        return None

    ts_time = get_datetime(timestamp)
    ordinal_ts_time = Decimal(ts_time.toordinal())

    # Fraction of the day elapsed since midnight, to 20 decimal places.
    midnight = datetime(ts_time.year, ts_time.month, ts_time.day, 0, 0, 0)
    total_seconds = (ts_time - midnight).total_seconds()
    fraction = (Decimal(repr(total_seconds)) / Decimal(86400)).quantize(
        Decimal('.00000000000000000001'), rounding=ROUND_HALF_UP)

    ordinal_ts_time += fraction
    log.debug("%s converted to %s", timestamp, ordinal_ts_time)
    return ordinal_ts_time
def guess_timefmt(datestr):
    """Try to guess the format a date is written in; return None if unknown."""
    if isinstance(datestr, (float, int)):
        return None

    seasonal_key = str(config.get('DEFAULT', 'seasonal_key', '9999'))

    # Date/time delimiter: ISO 'T' or a plain space.
    dt_delim = 'T' if datestr.find('T') > 0 else ' '

    delimiters = ['-', '.', ' ', '/']
    formatstrings = [['%Y', '%m', '%d'],
                     ['%d', '%m', '%Y'],
                     ['%d', '%b', '%Y'],
                     ['XXXX', '%m', '%d'],
                     ['%d', '%m', 'XXXX'],
                     ['%d', '%b', 'XXXX'],
                     [seasonal_key, '%m', '%d'],
                     ['%d', '%m', seasonal_key],
                     ['%d', '%b', seasonal_key]]
    timeformats = ['%H:%M:%S.%f', '%H:%M:%S', '%H:%M',
                   '%H:%M:%S.%f000Z', '%H:%M:%S.%fZ']

    # Does the string carry a parseable time component?
    for timefmt in timeformats:
        try:
            datetime.strptime(datestr.split(dt_delim)[-1].strip(), timefmt)
            usetime = True
            break
        except ValueError:
            usetime = False

    # Try every date layout x delimiter (x time format when present).
    for fmt in formatstrings:
        for delim in delimiters:
            datefmt = fmt[0] + delim + fmt[1] + delim + fmt[2]
            if usetime:
                for timefmt in timeformats:
                    complfmt = datefmt + dt_delim + timefmt
                    try:
                        datetime.strptime(datestr, complfmt)
                        return complfmt
                    except ValueError:
                        pass
            else:
                try:
                    datetime.strptime(datestr, datefmt)
                    return datefmt
                except ValueError:
                    pass

    # A handful of layouts that don't fit the grid above.
    custom_formats = ['%d/%m/%Y', '%b %d %Y', '%B %d %Y',
                      '%d/%m/XXXX', '%d/%m/' + seasonal_key]
    for fmt in custom_formats:
        if usetime:
            for timefmt in timeformats:
                complfmt = fmt + dt_delim + timefmt
                try:
                    datetime.strptime(datestr, complfmt)
                    return complfmt
                except ValueError:
                    pass
        else:
            try:
                datetime.strptime(datestr, fmt)
                return fmt
            except ValueError:
                pass

    return None
def reindex_timeseries(ts_string, new_timestamps):
    """Reindex a JSON timeseries onto the requested timestamps (forward-fill).

    Returns None if no requested timestamp maps to any data.
    """
    if not isinstance(new_timestamps, list):
        new_timestamps = [new_timestamps]
    new_timestamps = [get_datetime(t) for t in new_timestamps]

    # Seasonal series use a placeholder year; substitute it so pandas
    # can parse the index as real datetimes.
    seasonal_year = config.get('DEFAULT', 'seasonal_year', '1678')
    seasonal_key = config.get('DEFAULT', 'seasonal_key', '9999')
    ts = ts_string.replace(seasonal_key, seasonal_year)

    timeseries = pd.read_json(ts)
    idx = timeseries.index
    ts_timestamps = new_timestamps

    if type(idx) == pd.DatetimeIndex:
        if set(idx.year) == set([int(seasonal_year)]):
            # Seasonal data: compare requested timestamps in the
            # placeholder year, ignoring their real year.
            if isinstance(new_timestamps, list):
                ts_timestamps = [t.replace(year=int(seasonal_year))
                                 for t in ts_timestamps]

    reindexed_ts = timeseries.reindex(ts_timestamps, method='ffill')
    # Restore the caller's original timestamps on the result index.
    i = reindexed_ts.index
    reindexed_ts.index = pd.Index(new_timestamps, names=i.names)

    if len(reindexed_ts.dropna()) == 0:
        return None

    return reindexed_ts.where(reindexed_ts.notnull(), None)
def parse_time_step(time_step, target='s', units_ref=None):
    """Read in a time step string and convert it to seconds.

    :param time_step: string such as '1 day' — a number followed by a unit.
    :param target: unit to convert into (default seconds).
    :param units_ref: units service providing ``convert(value, from, to)``.
    :returns: (converted value as float, original numeric value, period code).
    :raises HydraPluginError: if no number can be extracted from time_step.
    """
    log.info("Parsing time step %s", time_step)
    value = re.findall(r'\d+', time_step)[0]
    valuelen = len(value)

    try:
        value = float(value)
    except ValueError:
        # BUG FIX: the exception was constructed but never raised.
        raise HydraPluginError(
            "Unable to extract number of time steps (%s) from time step %s"
            % (value, time_step))

    # Whatever follows the number is the unit symbol.
    unit = time_step[valuelen:].strip()
    period = get_time_period(unit)
    log.info("Time period is %s", period)

    converted_time_step = units_ref.convert(value, period, target)
    log.info("Time period is %s %s", converted_time_step, period)
    return float(converted_time_step), value, period
def get_time_axis(start_time, end_time, time_step, time_axis=None):
    """Create a list of datetimes from a start time, end time and time step.

    If an explicit time_axis is passed in, it is parsed and returned
    directly instead.
    """
    from ..lib import units

    if time_axis is not None:
        actual_dates_axis = []
        for t in time_axis:
            t = t.replace(',', '').strip()
            if t == '':
                continue
            actual_dates_axis.append(get_datetime(t))
        return actual_dates_axis

    if start_time is None:
        raise HydraPluginError("A start time must be specified")
    if end_time is None:
        raise HydraPluginError("And end time must be specified")
    if time_step is None:
        raise HydraPluginError("A time-step must be specified")

    start_date = get_datetime(start_time)
    end_date = get_datetime(end_time)
    delta_t, value, output_units = parse_time_step(time_step, units_ref=units)

    time_axis = [start_date]
    value = int(value)
    while start_date < end_date:
        # Months and years are calendar-aware; everything else is seconds.
        if output_units.lower() == "mon":
            start_date = start_date + relativedelta(months=value)
        elif output_units.lower() == "yr":
            start_date = start_date + relativedelta(years=value)
        else:
            start_date += timedelta(seconds=delta_t)
        time_axis.append(start_date)

    return time_axis
45,951 | def _get_all_attributes ( network ) : attrs = network . attributes for n in network . nodes : attrs . extend ( n . attributes ) for l in network . links : attrs . extend ( l . attributes ) for g in network . resourcegroups : attrs . extend ( g . attributes ) return attrs | Get all the complex mode attributes in the network so that they can be used for mapping to resource scenarios later . |
def _get_all_group_items(network_id):
    """Get all resource group items in the network across all scenarios.

    Returns a dict of item lists keyed on scenario_id.
    """
    base_qry = db.DBSession.query(ResourceGroupItem)
    item_qry = base_qry.join(Scenario).filter(Scenario.network_id == network_id)

    x = time.time()
    logging.info("Getting all items")
    all_items = db.DBSession.execute(item_qry.statement).fetchall()
    log.info("%s groups jointly retrieved in %s", len(all_items), time.time() - x)

    logging.info("items retrieved. Processing results...")
    x = time.time()
    item_dict = dict()
    for item in all_items:
        item_dict.setdefault(item.scenario_id, []).append(item)
    logging.info("items processed in %s", time.time() - x)

    return item_dict
def _get_all_resourcescenarios(network_id, user_id):
    """Get all the resource scenarios in a network, across all scenarios.

    Returns a dict of lists of resource-scenario objects keyed on
    scenario_id. Hidden datasets are excluded unless owned by or created
    by the requesting user.
    """
    rs_qry = db.DBSession.query(
        Dataset.type,
        Dataset.unit_id,
        Dataset.name,
        Dataset.hash,
        Dataset.cr_date,
        Dataset.created_by,
        Dataset.hidden,
        Dataset.value,
        ResourceScenario.dataset_id,
        ResourceScenario.scenario_id,
        ResourceScenario.resource_attr_id,
        ResourceScenario.source,
        ResourceAttr.attr_id,
    ).outerjoin(
        DatasetOwner,
        and_(DatasetOwner.dataset_id == Dataset.id,
             DatasetOwner.user_id == user_id)
    ).filter(
        # Visible when not hidden, created by the user, or owned by the user.
        or_(Dataset.hidden == 'N',
            Dataset.created_by == user_id,
            DatasetOwner.user_id != None),
        ResourceAttr.id == ResourceScenario.resource_attr_id,
        Scenario.id == ResourceScenario.scenario_id,
        Scenario.network_id == network_id,
        Dataset.id == ResourceScenario.dataset_id)

    x = time.time()
    logging.info("Getting all resource scenarios")
    all_rs = db.DBSession.execute(rs_qry.statement).fetchall()
    log.info("%s resource scenarios retrieved in %s", len(all_rs), time.time() - x)

    logging.info("resource scenarios retrieved. Processing results...")
    x = time.time()
    rs_dict = dict()
    for rs in all_rs:
        rs_obj = JSONObject(rs)
        rs_attr = JSONObject({'attr_id': rs.attr_id})
        rs_dataset = JSONDataset({
            'id': rs.dataset_id,
            'type': rs.type,
            'unit_id': rs.unit_id,
            'name': rs.name,
            'hash': rs.hash,
            'cr_date': rs.cr_date,
            'created_by': rs.created_by,
            'hidden': rs.hidden,
            'value': rs.value,
            'metadata': {},
        })
        rs_obj.resourceattr = rs_attr
        rs_obj.value = rs_dataset
        rs_obj.dataset = rs_dataset
        rs_dict.setdefault(rs.scenario_id, []).append(rs_obj)
    logging.info("resource scenarios processed in %s", time.time() - x)

    return rs_dict
def _get_metadata(network_id, user_id):
    """Get all the metadata in a network, across all scenarios.

    Returns a dict of {key: value} dicts keyed on dataset ID.
    """
    log.info("Getting Metadata")
    dataset_qry = db.DBSession.query(Dataset).outerjoin(
        DatasetOwner,
        and_(DatasetOwner.dataset_id == Dataset.id,
             DatasetOwner.user_id == user_id)
    ).filter(
        or_(Dataset.hidden == 'N', DatasetOwner.user_id != None),
        Scenario.id == ResourceScenario.scenario_id,
        Scenario.network_id == network_id,
        Dataset.id == ResourceScenario.dataset_id
    ).distinct().subquery()

    rs_qry = db.DBSession.query(Metadata).join(
        dataset_qry, Metadata.dataset_id == dataset_qry.c.id)

    x = time.time()
    logging.info("Getting all matadata")
    all_metadata = db.DBSession.execute(rs_qry.statement).fetchall()
    log.info("%s metadata jointly retrieved in %s", len(all_metadata), time.time() - x)

    logging.info("metadata retrieved. Processing results...")
    x = time.time()
    metadata_dict = dict()
    for m in all_metadata:
        entry = metadata_dict.setdefault(m.dataset_id, {})
        entry[m.key] = six.text_type(m.value)
    logging.info("metadata processed in %s", time.time() - x)

    return metadata_dict
def _get_links(network_id, template_id=None):
    """Get all active links in a network, optionally filtered by template."""
    extras = {'types': [], 'attributes': []}

    link_qry = db.DBSession.query(Link).filter(
        Link.network_id == network_id,
        Link.status == 'A').options(noload('network'))
    if template_id is not None:
        link_qry = link_qry.filter(
            ResourceType.link_id == Link.id,
            TemplateType.id == ResourceType.type_id,
            TemplateType.template_id == template_id)

    link_res = db.DBSession.execute(link_qry.statement).fetchall()
    return [JSONObject(l, extras=extras) for l in link_res]
def _get_groups(network_id, template_id=None):
    """Get all active resource groups in a network, optionally by template."""
    extras = {'types': [], 'attributes': []}

    group_qry = db.DBSession.query(ResourceGroup).filter(
        ResourceGroup.network_id == network_id,
        ResourceGroup.status == 'A').options(noload('network'))
    if template_id is not None:
        group_qry = group_qry.filter(
            ResourceType.group_id == ResourceGroup.id,
            TemplateType.id == ResourceType.type_id,
            TemplateType.template_id == template_id)

    group_res = db.DBSession.execute(group_qry.statement).fetchall()
    return [JSONObject(g, extras=extras) for g in group_res]
def _get_scenarios(network_id, include_data, user_id, scenario_ids=None):
    """Get all active scenarios in a network, with optional data."""
    scen_qry = db.DBSession.query(Scenario).filter(
        Scenario.network_id == network_id).options(
        noload('network')).filter(Scenario.status == 'A')

    if scenario_ids:
        logging.info("Filtering by scenario_ids %s", scenario_ids)
        scen_qry = scen_qry.filter(Scenario.id.in_(scenario_ids))

    extras = {'resourcescenarios': [], 'resourcegroupitems': []}
    scens = [JSONObject(s, extras=extras)
             for s in db.DBSession.execute(scen_qry.statement).fetchall()]

    all_resource_group_items = _get_all_group_items(network_id)

    # Accept either the legacy 'Y' flag or a real boolean.
    include = include_data == 'Y' or include_data == True
    if include:
        all_rs = _get_all_resourcescenarios(network_id, user_id)
        metadata = _get_metadata(network_id, user_id)

    for s in scens:
        s.resourcegroupitems = all_resource_group_items.get(s.id, [])
        if include:
            s.resourcescenarios = all_rs.get(s.id, [])
            for rs in s.resourcescenarios:
                rs.dataset.metadata = metadata.get(rs.dataset_id, {})

    return scens
def set_network_status(network_id, status, **kwargs):
    """Set a network's status attribute (e.g. 'A' to activate it)."""
    user_id = kwargs.get('user_id')
    try:
        net_i = db.DBSession.query(Network).filter(
            Network.id == network_id).one()
        net_i.check_write_permission(user_id)
        net_i.status = status
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))
    db.DBSession.flush()
    return 'OK'
def get_network_extents(network_id, **kwargs):
    """Given a network, return its maximum extents: the min/max x and y
    values over all of its nodes."""
    rs = db.DBSession.query(Node.x, Node.y).filter(
        Node.network_id == network_id).all()
    if len(rs) == 0:
        # No nodes at all: every bound is unknown.
        return dict(network_id=network_id,
                    min_x=None, max_x=None,
                    min_y=None, max_y=None)

    # Nodes with missing coordinates are ignored; if none remain on an
    # axis, fall back to a default 0..1 extent.
    x_vals = [r.x for r in rs if r.x is not None]
    x_min, x_max = (min(x_vals), max(x_vals)) if x_vals else (0, 1)

    y_vals = [r.y for r in rs if r.y is not None]
    y_min, y_max = (min(y_vals), max(y_vals)) if y_vals else (0, 1)

    return JSONObject(dict(network_id=network_id,
                           min_x=x_min, max_x=x_max,
                           min_y=y_min, max_y=y_max))
def add_nodes(network_id, nodes, **kwargs):
    """Add nodes to a network."""
    start_time = datetime.datetime.now()

    # Reject duplicate node names up front.
    names = []
    for n_i in nodes:
        if n_i.name in names:
            raise HydraError("Duplicate Node Name: %s" % (n_i.name))
        names.append(n_i.name)

    user_id = kwargs.get('user_id')
    try:
        net_i = db.DBSession.query(Network).filter(
            Network.id == network_id).one()
        net_i.check_write_permission(user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    _add_nodes_to_database(net_i, nodes)

    net_i.project_id = net_i.project_id
    db.DBSession.flush()

    # Re-read the persisted nodes and map incoming ids to DB rows by name.
    node_s = db.DBSession.query(Node).filter(
        Node.network_id == network_id).all()
    iface_nodes = {n_i.name: n_i for n_i in node_s}
    node_id_map = {node.id: iface_nodes[node.name] for node in nodes}

    _bulk_add_resource_attrs(network_id, 'NODE', nodes, iface_nodes)

    log.info("Nodes added in %s", get_timing(start_time))
    return node_s
def add_links(network_id, links, **kwargs):
    """Add links to a network.

    :param network_id: id of the network to add links to.
    :param links: sequence of link objects (must have unique names).
    :raises HydraError: on duplicate link names.
    :raises ResourceNotFoundError: if the network does not exist.
    :returns: the list of persisted Link rows for the network.
    """
    start_time = datetime.datetime.now()
    user_id = kwargs.get('user_id')

    # Reject duplicate link names up front.
    names = []
    for l_i in links:
        if l_i.name in names:
            raise HydraError("Duplicate Link Name: %s" % (l_i.name))
        names.append(l_i.name)

    try:
        net_i = db.DBSession.query(Network).filter(
            Network.id == network_id).one()
        net_i.check_write_permission(user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    node_id_map = dict()
    for node in net_i.nodes:
        node_id_map[node.id] = node

    _add_links_to_database(net_i, links, node_id_map)

    net_i.project_id = net_i.project_id
    db.DBSession.flush()

    # Re-read the persisted links and index them by name.
    link_s = db.DBSession.query(Link).filter(
        Link.network_id == network_id).all()
    iface_links = {}
    for l_i in link_s:
        iface_links[l_i.name] = l_i

    _bulk_add_resource_attrs(net_i.id, 'LINK', links, iface_links)

    # BUG FIX: log message previously said "Nodes added" (copy-paste).
    log.info("Links added in %s", get_timing(start_time))
    return link_s
def update_node(node, flush=True, **kwargs):
    """Update a node.

    If new attributes are present, they will be added to the node; the
    non-presence of attributes does not remove them.
    """
    user_id = kwargs.get('user_id')
    try:
        node_i = db.DBSession.query(Node).filter(Node.id == node.id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Node %s not found" % (node.id))

    node_i.network.check_write_permission(user_id)

    # Only overwrite fields that were actually supplied.
    if node.name is not None:
        node_i.name = node.name
    if node.x is not None:
        node_i.x = node.x
    if node.y is not None:
        node_i.y = node.y
    if node.description is not None:
        node_i.description = node.description
    if node.layout is not None:
        node_i.layout = node.get_layout()

    if node.attributes is not None:
        _update_attributes(node_i, node.attributes)
    if node.types is not None:
        hdb.add_resource_types(node_i, node.types)

    if flush is True:
        db.DBSession.flush()

    return node_i
def update_nodes(nodes, **kwargs):
    """Update several nodes, flushing the session once at the end.

    New attributes present on each node are added; absent attributes
    are not removed.

    :returns: the list of updated Node ORM objects
    """
    user_id = kwargs.get('user_id')
    # Defer flushing until all nodes are processed.
    updated = [update_node(n, flush=False, user_id=user_id) for n in nodes]
    db.DBSession.flush()
    return updated
def set_node_status(node_id, status, **kwargs):
    """Set a node's status, cascading the same status to all its links.

    :raises ResourceNotFoundError: if the node does not exist
    :returns: the updated Node ORM object
    """
    user_id = kwargs.get('user_id')
    try:
        db_node = db.DBSession.query(Node).filter(Node.id == node_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Node %s not found" % (node_id))

    db_node.network.check_write_permission(user_id)

    db_node.status = status

    # A link cannot remain active without its endpoint node, so its
    # status follows the node's in both directions.
    for link in list(db_node.links_to) + list(db_node.links_from):
        link.status = status

    db.DBSession.flush()

    return db_node
def purge_network(network_id, purge_data, **kwargs):
    """Remove a network from the DB completely.

    :param purge_data: intended to also delete data used only by this
        network. NOTE(review): this argument is currently unused in the
        body -- deletion presumably relies on DB-level cascades; confirm
        whether per-dataset purging was meant to happen here.
    :raises ResourceNotFoundError: if the network does not exist
    :returns: 'OK'
    """
    user_id = kwargs.get('user_id')
    try:
        net_i = db.DBSession.query(Network).filter(Network.id == network_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    log.info("Deleting network %s, id=%s", net_i.name, network_id)

    net_i.check_write_permission(user_id)
    db.DBSession.delete(net_i)
    db.DBSession.flush()
    return 'OK'
def _purge_datasets_unique_to_resource(ref_key, ref_id):
    """Delete datasets used exclusively by the given resource.

    For each dataset referenced by the resource, compare how many times
    it appears on this resource with its total usage count across all
    resource scenarios. When the two match, the dataset is unique to the
    resource, so its resource scenarios and the dataset itself are deleted.

    :param ref_key: one of 'NODE', 'LINK', 'GROUP'
    :param ref_id: id of the resource being purged
    """
    count_qry = db.DBSession.query(
        ResourceScenario.dataset_id,
        func.count(ResourceScenario.dataset_id)
    ).group_by(ResourceScenario.dataset_id).filter(
        ResourceScenario.resource_attr_id == ResourceAttr.id)

    # BUG FIX: Query.filter() returns a *new* query object; the original
    # code discarded the result, so the per-resource restriction below
    # never took effect and the counts covered every resource.
    if ref_key == 'NODE':
        count_qry = count_qry.filter(ResourceAttr.node_id == ref_id)
    elif ref_key == 'LINK':
        count_qry = count_qry.filter(ResourceAttr.link_id == ref_id)
    elif ref_key == 'GROUP':
        count_qry = count_qry.filter(ResourceAttr.group_id == ref_id)

    for dataset_id, count in count_qry.all():
        full_dataset_count = db.DBSession.query(ResourceScenario).filter(
            ResourceScenario.dataset_id == dataset_id).count()

        if full_dataset_count == count:
            # Dataset appears only on this resource: purge its resource
            # scenarios first, then the dataset row itself.
            datasets_rs_to_delete = db.DBSession.query(ResourceScenario).filter(
                ResourceScenario.dataset_id == dataset_id).all()
            for dataset_rs in datasets_rs_to_delete:
                db.DBSession.delete(dataset_rs)

            dataset_to_delete = db.DBSession.query(Dataset).filter(
                Dataset.id == dataset_id).one()
            log.info("Deleting %s dataset %s (%s)", ref_key,
                     dataset_to_delete.name, dataset_to_delete.id)
            db.DBSession.delete(dataset_to_delete)
def delete_node(node_id, purge_data, **kwargs):
    """Remove a node from the DB completely.

    Group memberships are removed first. When ``purge_data`` is 'Y',
    datasets used only by this node are deleted as well.

    :raises ResourceNotFoundError: if the node does not exist
    :returns: 'OK'
    """
    user_id = kwargs.get('user_id')

    try:
        db_node = db.DBSession.query(Node).filter(Node.id == node_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Node %s not found" % (node_id))

    # Remove the node's group memberships so no orphan items remain.
    membership_qry = db.DBSession.query(ResourceGroupItem).filter(
        ResourceGroupItem.node_id == node_id)
    for item in membership_qry.all():
        db.DBSession.delete(item)

    if purge_data == 'Y':
        _purge_datasets_unique_to_resource('NODE', node_id)

    log.info("Deleting node %s, id=%s", db_node.name, node_id)

    db_node.network.check_write_permission(user_id)
    db.DBSession.delete(db_node)
    db.DBSession.flush()
    return 'OK'
def add_link(network_id, link, **kwargs):
    """Add a single link to a network.

    Resolves both endpoint nodes, creates the link with its attributes,
    then applies any template types by bulk-inserting ResourceType and
    ResourceAttr rows plus the resource scenarios holding each type's
    default values.

    :raises ResourceNotFoundError: if the network or either node is missing
    :returns: the newly created Link ORM object
    """
    user_id = kwargs.get('user_id')

    try:
        net_i = db.DBSession.query(Network).filter(Network.id == network_id).one()
        net_i.check_write_permission(user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    try:
        node_1 = db.DBSession.query(Node).filter(Node.id == link.node_1_id).one()
        node_2 = db.DBSession.query(Node).filter(Node.id == link.node_2_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Nodes for link not found")

    link_i = net_i.add_link(link.name, link.description, link.layout,
                            node_1, node_2)

    hdb.add_resource_attributes(link_i, link.attributes)
    db.DBSession.flush()

    if link.types is not None and len(link.types) > 0:
        res_types = []
        res_attrs = []
        # Default-value resource scenarios, keyed by attr_id, to be
        # attached once the real resource_attr ids are known.
        res_scenarios = {}
        for typesummary in link.types:
            ra, rt, rs = template.set_resource_type(link_i,
                                                    typesummary.id,
                                                    **kwargs)
            res_types.append(rt)
            res_attrs.extend(ra)
            res_scenarios.update(rs)

        if len(res_types) > 0:
            db.DBSession.bulk_insert_mappings(ResourceType, res_types)
        if len(res_attrs) > 0:
            db.DBSession.bulk_insert_mappings(ResourceAttr, res_attrs)

            # NOTE(review): the freshly inserted ResourceAttrs are
            # re-read via ORDER BY id DESC LIMIT n; this assumes no
            # concurrent inserts between the bulk insert and this query.
            # Confirm this is safe under concurrent load.
            new_res_attrs = db.DBSession.query(ResourceAttr).order_by(
                ResourceAttr.id.desc()).limit(len(res_attrs)).all()

            all_rs = []
            for ra in new_res_attrs:
                ra_id = ra.id
                if ra.attr_id in res_scenarios:
                    rs_list = res_scenarios[ra.attr_id]
                    for rs in rs_list:
                        # Attach the real resource_attr_id to each
                        # default resource scenario before insertion.
                        rs_list[rs]['resource_attr_id'] = ra_id
                        all_rs.append(rs_list[rs])

            if len(all_rs) > 0:
                db.DBSession.bulk_insert_mappings(ResourceScenario, all_rs)

    # Reload so relationships populated by the bulk inserts are visible.
    db.DBSession.refresh(link_i)

    return link_i
def update_link(link, **kwargs):
    """Update an existing link's properties, attributes and types.

    Only the fields supplied (non-None) on ``link`` are written.

    :raises ResourceNotFoundError: if the link does not exist
    :returns: the updated Link ORM object
    """
    user_id = kwargs.get('user_id')

    try:
        db_link = db.DBSession.query(Link).filter(Link.id == link.id).one()
        db_link.network.check_write_permission(user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Link %s not found" % (link.id))

    # Copy over each supplied scalar field.
    if link.name is not None:
        db_link.name = link.name
    if link.node_1_id is not None:
        db_link.node_1_id = link.node_1_id
    if link.node_2_id is not None:
        db_link.node_2_id = link.node_2_id
    if link.description is not None:
        db_link.description = link.description
    if link.layout is not None:
        db_link.layout = link.get_layout()

    if link.attributes is not None:
        hdb.add_resource_attributes(db_link, link.attributes)

    if link.types is not None:
        hdb.add_resource_types(db_link, link.types)

    db.DBSession.flush()

    return db_link
def set_link_status(link_id, status, **kwargs):
    """Set the status of a link.

    :raises ResourceNotFoundError: if the link does not exist
    """
    user_id = kwargs.get('user_id')

    try:
        db_link = db.DBSession.query(Link).filter(Link.id == link_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Link %s not found" % (link_id))

    db_link.network.check_write_permission(user_id)

    db_link.status = status
    db.DBSession.flush()
def delete_link(link_id, purge_data, **kwargs):
    """Remove a link from the DB completely.

    Group memberships are removed first. When ``purge_data`` is 'Y',
    datasets used only by this link are deleted as well.

    :raises ResourceNotFoundError: if the link does not exist
    """
    user_id = kwargs.get('user_id')

    try:
        db_link = db.DBSession.query(Link).filter(Link.id == link_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Link %s not found" % (link_id))

    # Remove the link's group memberships so no orphan items remain.
    membership_qry = db.DBSession.query(ResourceGroupItem).filter(
        ResourceGroupItem.link_id == link_id)
    for item in membership_qry.all():
        db.DBSession.delete(item)

    if purge_data == 'Y':
        _purge_datasets_unique_to_resource('LINK', link_id)

    log.info("Deleting link %s, id=%s", db_link.name, link_id)

    db_link.network.check_write_permission(user_id)
    db.DBSession.delete(db_link)
    db.DBSession.flush()
def add_group(network_id, group, **kwargs):
    """Add a resource group to a network.

    Creates the group with its attributes, then applies any template
    types by bulk-inserting ResourceType and ResourceAttr rows plus the
    resource scenarios holding each type's default values.

    :raises ResourceNotFoundError: if the network does not exist
    :returns: the newly created ResourceGroup ORM object
    """
    user_id = kwargs.get('user_id')

    try:
        net_i = db.DBSession.query(Network).filter(Network.id == network_id).one()
        net_i.check_write_permission(user_id=user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    res_grp_i = net_i.add_group(group.name, group.description, group.status)

    hdb.add_resource_attributes(res_grp_i, group.attributes)
    db.DBSession.flush()

    if group.types is not None and len(group.types) > 0:
        res_types = []
        res_attrs = []
        # Default-value resource scenarios, keyed by attr_id, to be
        # attached once the real resource_attr ids are known.
        res_scenarios = {}
        for typesummary in group.types:
            ra, rt, rs = template.set_resource_type(res_grp_i,
                                                    typesummary.id,
                                                    **kwargs)
            res_types.append(rt)
            res_attrs.extend(ra)
            res_scenarios.update(rs)

        if len(res_types) > 0:
            db.DBSession.bulk_insert_mappings(ResourceType, res_types)
        if len(res_attrs) > 0:
            db.DBSession.bulk_insert_mappings(ResourceAttr, res_attrs)

            # NOTE(review): the freshly inserted ResourceAttrs are
            # re-read via ORDER BY id DESC LIMIT n; this assumes no
            # concurrent inserts between the bulk insert and this query.
            # Confirm this is safe under concurrent load.
            new_res_attrs = db.DBSession.query(ResourceAttr).order_by(
                ResourceAttr.id.desc()).limit(len(res_attrs)).all()

            all_rs = []
            for ra in new_res_attrs:
                ra_id = ra.id
                if ra.attr_id in res_scenarios:
                    rs_list = res_scenarios[ra.attr_id]
                    for rs in rs_list:
                        # Attach the real resource_attr_id to each
                        # default resource scenario before insertion.
                        rs_list[rs]['resource_attr_id'] = ra_id
                        all_rs.append(rs_list[rs])

            if len(all_rs) > 0:
                db.DBSession.bulk_insert_mappings(ResourceScenario, all_rs)

    # Reload so relationships populated by the bulk inserts are visible.
    db.DBSession.refresh(res_grp_i)

    return res_grp_i
def update_group(group, **kwargs):
    """Update a resource group.

    New attributes present on ``group`` are added; absent attributes
    are not removed.

    :raises ResourceNotFoundError: if the group does not exist
    :returns: the updated ResourceGroup ORM object
    """
    user_id = kwargs.get('user_id')

    try:
        group_i = db.DBSession.query(ResourceGroup).filter(
            ResourceGroup.id == group.id).one()
    except NoResultFound:
        raise ResourceNotFoundError("group %s not found" % (group.id))

    group_i.network.check_write_permission(user_id)

    # Fixed: use explicit 'is not None' checks, consistent with
    # update_node/update_link. The original tested the description for
    # truthiness, so an explicit empty-string description was ignored,
    # and compared the name with '!= None'.
    if group.name is not None:
        group_i.name = group.name
    if group.description is not None:
        group_i.description = group.description

    if group.attributes is not None:
        _update_attributes(group_i, group.attributes)

    if group.types is not None:
        hdb.add_resource_types(group_i, group.types)

    db.DBSession.flush()

    return group_i
def set_group_status(group_id, status, **kwargs):
    """Set the status of a resource group.

    :raises ResourceNotFoundError: if the group does not exist
    :returns: the updated ResourceGroup ORM object
    """
    user_id = kwargs.get('user_id')

    try:
        db_group = db.DBSession.query(ResourceGroup).filter(
            ResourceGroup.id == group_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("ResourceGroup %s not found" % (group_id))

    db_group.network.check_write_permission(user_id)

    db_group.status = status
    db.DBSession.flush()

    return db_group
def delete_group(group_id, purge_data, **kwargs):
    """Remove a resource group from the DB completely.

    The group's items are removed first. When ``purge_data`` is 'Y',
    datasets used only by this group are deleted as well.

    :raises ResourceNotFoundError: if the group does not exist
    """
    user_id = kwargs.get('user_id')

    try:
        db_group = db.DBSession.query(ResourceGroup).filter(
            ResourceGroup.id == group_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Group %s not found" % (group_id))

    # Delete the group's items before the group itself.
    item_qry = db.DBSession.query(ResourceGroupItem).filter(
        ResourceGroupItem.group_id == group_id)
    for item in item_qry.all():
        db.DBSession.delete(item)

    if purge_data == 'Y':
        _purge_datasets_unique_to_resource('GROUP', group_id)

    log.info("Deleting group %s, id=%s", db_group.name, group_id)

    db_group.network.check_write_permission(user_id)
    db.DBSession.delete(db_group)
    db.DBSession.flush()
def get_scenarios(network_id, **kwargs):
    """Return every scenario belonging to the given network.

    :raises ResourceNotFoundError: if the network does not exist
    """
    user_id = kwargs.get('user_id')

    try:
        network = db.DBSession.query(Network).filter(
            Network.id == network_id).one()
        network.check_read_permission(user_id=user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    return network.scenarios
def validate_network_topology(network_id, **kwargs):
    """Return the ids of active nodes not attached to any active link.

    An 'orphan' node has status 'A' but appears at neither end of any
    link with status 'A'.

    :raises ResourceNotFoundError: if the network does not exist
    :returns: a set of orphan node ids
    """
    user_id = kwargs.get('user_id')

    try:
        net_i = db.DBSession.query(Network).filter(Network.id == network_id).one()
        net_i.check_write_permission(user_id=user_id)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))

    # All active nodes in the network.
    active_nodes = {n.node_id for n in net_i.nodes if n.status == 'A'}

    # All node ids referenced by an active link.
    linked_nodes = set()
    for link_i in net_i.links:
        if link_i.status != 'A':
            continue
        linked_nodes.add(link_i.node_1_id)
        linked_nodes.add(link_i.node_2_id)

    return active_nodes - linked_nodes
def get_resources_of_type(network_id, type_id, **kwargs):
    """Return the nodes, links and groups carrying the given type.

    :returns: a 3-tuple (nodes, links, resourcegroups), each a list of
        ORM objects in the network whose ResourceType matches ``type_id``
    """
    def _resources_with_type(model):
        # Join through ResourceType to restrict by template type.
        return db.DBSession.query(model).join(ResourceType).filter(
            model.network_id == network_id,
            ResourceType.type_id == type_id).all()

    nodes = _resources_with_type(Node)
    links = _resources_with_type(Link)
    groups = _resources_with_type(ResourceGroup)

    return nodes, links, groups
def clean_up_network(network_id, **kwargs):
    """Permanently delete everything soft-deleted (status 'X') in a network.

    Purges nodes, links, resource groups and scenarios whose status is
    'X'.

    :raises ResourceNotFoundError: if the network does not exist
    :returns: 'OK'
    """
    # NOTE(review): user_id is read but not used for a permission check
    # in this function -- confirm whether a check_write_permission call
    # was intended.
    user_id = kwargs.get('user_id')
    try:
        log.debug("Querying Network %s", network_id)
        # Load only the network shell: the big collections are noload-ed
        # because the soft-deleted rows are queried directly below.
        net_i = db.DBSession.query(Network).filter(
            Network.id == network_id).options(
            noload('scenarios')).options(
            noload('nodes')).options(
            noload('links')).options(
            noload('resourcegroups')).options(
            joinedload_all('types.templatetype.template')).one()
        # Touch the attributes collection so it is loaded before deletes.
        net_i.attributes

        node_qry = db.DBSession.query(Node).filter(
            Node.network_id == network_id).filter(Node.status == 'X').all()
        link_qry = db.DBSession.query(Link).filter(
            Link.network_id == network_id).filter(Link.status == 'X').all()
        group_qry = db.DBSession.query(ResourceGroup).filter(
            ResourceGroup.network_id == network_id).filter(
            ResourceGroup.status == 'X').all()
        scenario_qry = db.DBSession.query(Scenario).filter(
            Scenario.network_id == network_id).filter(
            Scenario.status == 'X').all()

        for n in node_qry:
            db.DBSession.delete(n)
        for l in link_qry:
            db.DBSession.delete(l)
        for g in group_qry:
            db.DBSession.delete(g)
        for s in scenario_qry:
            db.DBSession.delete(s)
    except NoResultFound:
        raise ResourceNotFoundError("Network %s not found" % (network_id))
    db.DBSession.flush()
    return 'OK'
def get_all_resource_attributes_in_network(attr_id, network_id, **kwargs):
    """Find every resource attribute in a network matching ``attr_id``.

    Searches the network itself plus its nodes, links and groups in a
    single query, and returns JSONObjects with the owning resource
    attached under ``.resource``.

    :raises HydraError: if the attribute itself does not exist
    :returns: a list of JSONObject resource attributes
    """
    user_id = kwargs.get('user_id')

    # Existence check only; the Attr row itself is not used further.
    try:
        a = db.DBSession.query(Attr).filter(Attr.id == attr_id).one()
    except NoResultFound:
        raise HydraError("Attribute %s not found" % (attr_id,))

    # One query across all four resource kinds; outer joins let a row
    # match through whichever FK (node/link/network/group) is populated.
    ra_qry = db.DBSession.query(ResourceAttr).filter(
        ResourceAttr.attr_id == attr_id,
        or_(Network.id == network_id,
            Node.network_id == network_id,
            Link.network_id == network_id,
            ResourceGroup.network_id == network_id)
    ).outerjoin('node')\
     .outerjoin('link')\
     .outerjoin('network')\
     .outerjoin('resourcegroup')\
     .options(joinedload_all('node'))\
     .options(joinedload_all('link'))\
     .options(joinedload_all('resourcegroup'))\
     .options(joinedload_all('network'))

    resourceattrs = ra_qry.all()

    json_ra = []
    for ra in resourceattrs:
        ra_j = JSONObject(ra, extras={
            'node': JSONObject(ra.node) if ra.node else None,
            'link': JSONObject(ra.link) if ra.link else None,
            'resourcegroup': JSONObject(ra.resourcegroup) if ra.resourcegroup else None,
            'network': JSONObject(ra.network) if ra.network else None})

        # Expose whichever owner is populated as the generic '.resource'.
        if ra_j.node is not None:
            ra_j.resource = ra_j.node
        elif ra_j.link is not None:
            ra_j.resource = ra_j.link
        elif ra_j.resourcegroup is not None:
            ra_j.resource = ra_j.resourcegroup
        elif ra.network is not None:
            # NOTE(review): this branch tests 'ra.network' while the
            # others test the JSONObject copy -- same outcome here, but
            # the inconsistency is worth confirming.
            ra_j.resource = ra_j.network

        json_ra.append(ra_j)

    return json_ra
def copy_data_from_scenario(resource_attrs, source_scenario_id, target_scenario_id, **kwargs):
    """Copy dataset ids from a source scenario to a target scenario.

    For each resource attribute id in ``resource_attrs``: if the target
    scenario already has a resource scenario for it, its dataset_id is
    overwritten with the source's; otherwise a new resource scenario is
    created in the target.

    NOTE(review): the return value is the list of *pre-existing* target
    resource scenarios; rows newly created in the loop below are not
    included -- confirm callers expect this.

    :returns: the target scenario's pre-existing resource scenarios
    """
    # Index the target's existing resource scenarios by resource_attr_id.
    target_resourcescenarios = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.scenario_id == target_scenario_id,
        ResourceScenario.resource_attr_id.in_(resource_attrs)).all()

    target_rs_dict = {}
    for target_rs in target_resourcescenarios:
        target_rs_dict[target_rs.resource_attr_id] = target_rs

    source_resourcescenarios = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.scenario_id == source_scenario_id,
        ResourceScenario.resource_attr_id.in_(resource_attrs)).all()

    for source_rs in source_resourcescenarios:
        target_rs = target_rs_dict.get(source_rs.resource_attr_id)
        if target_rs is not None:
            # Update in place: point the existing row at the source data.
            target_rs.dataset_id = source_rs.dataset_id
        else:
            # No counterpart in the target: create one.
            target_rs = ResourceScenario()
            target_rs.scenario_id = target_scenario_id
            target_rs.dataset_id = source_rs.dataset_id
            target_rs.resource_attr_id = source_rs.resource_attr_id
            db.DBSession.add(target_rs)

    db.DBSession.flush()

    return target_resourcescenarios
def get_scenario(scenario_id, **kwargs):
    """Return a scenario with its resource scenarios and group items.

    Datasets the requesting user may not read are masked: their value is
    set to None and their metadata emptied.

    :returns: a JSONObject copy of the scenario
    """
    user_id = kwargs.get('user_id')

    scen_i = _get_scenario(scenario_id, user_id)

    scen_j = JSONObject(scen_i)

    rscen_rs = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.scenario_id == scenario_id).options(
        joinedload_all('dataset.metadata')).all()

    # Touch the relationships so they are loaded before serialisation.
    for rs in rscen_rs:
        rs.resourceattr
        rs.resourceattr.attr

    rgi_rs = db.DBSession.query(ResourceGroupItem).filter(
        ResourceGroupItem.scenario_id == scenario_id).all()

    scen_j.resourcescenarios = []
    for rs in rscen_rs:
        rs_j = JSONObject(rs, extras={'resourceattr': JSONObject(rs.resourceattr)})
        if rs.dataset.check_read_permission(user_id, do_raise=False) is False:
            # Hide values the user is not permitted to read.
            rs_j.dataset['value'] = None
            rs_j.dataset.metadata = JSONObject({})
        scen_j.resourcescenarios.append(rs_j)

    scen_j.resourcegroupitems = [JSONObject(r) for r in rgi_rs]

    return scen_j
def add_scenario(network_id, scenario, **kwargs):
    """Add a scenario to a specified network.

    Bulk-inserts the scenario's datasets, then builds its resource
    scenarios and resource group items.

    :raises HydraError: if a scenario with the same name already exists
        in the network
    :returns: the new Scenario ORM object
    """
    user_id = int(kwargs.get('user_id'))
    log.info("Adding scenarios to network")

    _check_network_ownership(network_id, user_id)

    existing_scen = db.DBSession.query(Scenario).filter(
        Scenario.name == scenario.name,
        Scenario.network_id == network_id).first()
    if existing_scen is not None:
        raise HydraError("Scenario with name %s already exists in network %s" % (
            scenario.name, network_id))

    scen = Scenario()
    scen.name = scenario.name
    scen.description = scenario.description
    scen.layout = scenario.get_layout()
    scen.network_id = network_id
    scen.created_by = user_id
    # Times are stored as stringified ordinals when supplied.
    scen.start_time = str(timestamp_to_ordinal(scenario.start_time)) \
        if scenario.start_time else None
    scen.end_time = str(timestamp_to_ordinal(scenario.end_time)) \
        if scenario.end_time else None
    scen.time_step = scenario.time_step
    scen.resourcescenarios = []
    scen.resourcegroupitems = []

    # Negative ids are client-side placeholders for "new".
    if scenario.id < 0:
        scenario.id = None

    if scenario.resourcescenarios is not None:
        # Bulk-insert all datasets in one go, then pair each inserted
        # dataset back up with its resource attribute by position.
        all_data = [r.dataset for r in scenario.resourcescenarios]
        datasets = data._bulk_insert_data(all_data, user_id=user_id)

        resource_attr_ids = [r.resource_attr_id for r in scenario.resourcescenarios]
        for i, ra_id in enumerate(resource_attr_ids):
            rs_i = ResourceScenario()
            rs_i.resource_attr_id = ra_id
            rs_i.dataset_id = datasets[i].id
            rs_i.scenario_id = scen.id
            rs_i.dataset = datasets[i]
            scen.resourcescenarios.append(rs_i)

    if scenario.resourcegroupitems is not None:
        for group_item in scenario.resourcegroupitems:
            group_item_i = ResourceGroupItem()
            group_item_i.scenario_id = scen.id
            group_item_i.group_id = group_item.group_id
            group_item_i.ref_key = group_item.ref_key
            # The FK column used depends on the referenced resource kind.
            if group_item.ref_key == 'NODE':
                group_item_i.node_id = group_item.ref_id
            elif group_item.ref_key == 'LINK':
                group_item_i.link_id = group_item.ref_id
            elif group_item.ref_key == 'GROUP':
                group_item_i.subgroup_id = group_item.ref_id
            scen.resourcegroupitems.append(group_item_i)

    db.DBSession.add(scen)
    db.DBSession.flush()

    return scen
def update_scenario(scenario, update_data=True, update_groups=True, flush=True, **kwargs):
    """Update a single scenario.

    All referenced resources already exist, so there is no need to
    worry about negative (placeholder) ids.

    :param update_data: when True, bulk-insert the datasets and update
        the scenario's resource scenarios
    :param update_groups: when True, update the scenario's group items
    :param flush: pass False to let the caller batch the session flush
    :raises PermissionError: if the scenario is locked
    :returns: the updated Scenario ORM object
    """
    user_id = kwargs.get('user_id')
    scen = _get_scenario(scenario.id, user_id)

    if scen.locked == 'Y':
        raise PermissionError('Scenario is locked. Unlock before editing.')

    # Start/end times arrive either as floats (already ordinals) or as
    # timestamps to be converted; both are stored as text.
    start_time = None
    if isinstance(scenario.start_time, float):
        start_time = six.text_type(scenario.start_time)
    else:
        start_time = timestamp_to_ordinal(scenario.start_time)
        if start_time is not None:
            start_time = six.text_type(start_time)

    end_time = None
    if isinstance(scenario.end_time, float):
        end_time = six.text_type(scenario.end_time)
    else:
        end_time = timestamp_to_ordinal(scenario.end_time)
        if end_time is not None:
            end_time = six.text_type(end_time)

    scen.name = scenario.name
    scen.description = scenario.description
    scen.layout = scenario.get_layout()
    scen.start_time = start_time
    scen.end_time = end_time
    scen.time_step = scenario.time_step

    if scenario.resourcescenarios == None:
        scenario.resourcescenarios = []
    if scenario.resourcegroupitems == None:
        scenario.resourcegroupitems = []

    # Touch the collection so it is loaded before being modified.
    scen.resourcescenarios
    if update_data is True:
        datasets = [rs.dataset for rs in scenario.resourcescenarios]
        updated_datasets = data._bulk_insert_data(datasets, user_id,
                                                  kwargs.get('app_name'))
        for i, r_scen in enumerate(scenario.resourcescenarios):
            # Datasets were inserted in order, so pair them by position.
            _update_resourcescenario(scen, r_scen,
                                     dataset=updated_datasets[i],
                                     user_id=user_id,
                                     source=kwargs.get('app_name'))

    # Touch the collection so it is loaded before being modified.
    scen.resourcegroupitems
    if update_groups is True:
        for group_item in scenario.resourcegroupitems:
            _add_resourcegroupitem(group_item, scenario.id)

    if flush is True:
        db.DBSession.flush()

    return scen
45,985 | def _get_as_obj ( obj_dict , name ) : if obj_dict . get ( '_sa_instance_state' ) : del obj_dict [ '_sa_instance_state' ] obj = namedtuple ( name , tuple ( obj_dict . keys ( ) ) ) for k , v in obj_dict . items ( ) : setattr ( obj , k , v ) log . info ( "%s = %s" , k , getattr ( obj , k ) ) return obj | Turn a dictionary into a named tuple so it can be passed into the constructor of a complex model generator . |
def get_resource_scenario(resource_attr_id, scenario_id, **kwargs):
    """Fetch the resource scenario for an attribute/scenario pair.

    Use when the resource attribute and scenario are known and the
    associated dataset is wanted.

    :raises ResourceNotFoundError: if no such resource scenario exists
    :returns: the ResourceScenario ORM object with its dataset and
        metadata eagerly loaded
    """
    user_id = kwargs.get('user_id')

    # Validates the scenario exists and the user can see it.
    _get_scenario(scenario_id, user_id)

    try:
        resource_scenario = db.DBSession.query(ResourceScenario).filter(
            ResourceScenario.resource_attr_id == resource_attr_id,
            ResourceScenario.scenario_id == scenario_id
        ).options(joinedload_all('dataset')
        ).options(joinedload_all('dataset.metadata')).one()
    except NoResultFound:
        raise ResourceNotFoundError(
            "resource scenario for %s not found in scenario %s" % (
                resource_attr_id, scenario_id))

    return resource_scenario
def bulk_update_resourcedata(scenario_ids, resource_scenarios, **kwargs):
    """Apply the same resource-scenario updates to several scenarios.

    All scenarios must belong to the same network. A resource scenario
    carrying a dataset is inserted/updated; one without a dataset is
    removed from the scenario.

    :raises HydraError: if the scenarios span more than one network
    :returns: dict mapping scenario_id -> list of updated resource
        scenarios
    """
    user_id = kwargs.get('user_id')

    # Sanity check: all target scenarios must share one network.
    net_ids = db.DBSession.query(Scenario.network_id).filter(
        Scenario.id.in_(scenario_ids)).all()
    if len(set(net_ids)) != 1:
        raise HydraError("Scenario IDS are not in the same network")

    # Fixed: removed a dead 'res = None' assignment immediately
    # overwritten by the dict literal, and used the user_id local
    # consistently instead of mixing it with kwargs['user_id'].
    res = {}
    for scenario_id in scenario_ids:
        _check_can_edit_scenario(scenario_id, user_id)
        scen_i = _get_scenario(scenario_id, user_id)

        res[scenario_id] = []
        for rs in resource_scenarios:
            if rs.dataset is not None:
                updated_rs = _update_resourcescenario(
                    scen_i, rs, user_id=user_id, source=kwargs.get('app_name'))
                res[scenario_id].append(updated_rs)
            else:
                # No dataset means "remove this value from the scenario".
                _delete_resourcescenario(scenario_id, rs.resource_attr_id)

    db.DBSession.flush()

    return res
def update_resourcedata(scenario_id, resource_scenarios, **kwargs):
    """Update the data associated with a scenario.

    Data missing from ``resource_scenarios`` is NOT removed from the
    scenario -- use ``remove_resourcedata`` for that. A resource
    scenario without a dataset is treated as a deletion request.

    :returns: the list of updated resource scenarios
    """
    user_id = kwargs.get('user_id')

    _check_can_edit_scenario(scenario_id, kwargs['user_id'])

    scen_i = _get_scenario(scenario_id, user_id)

    # Fixed: removed a dead 'res = None' assignment that was immediately
    # overwritten by the list literal.
    res = []
    for rs in resource_scenarios:
        if rs.dataset is not None:
            updated_rs = _update_resourcescenario(
                scen_i, rs, user_id=user_id, source=kwargs.get('app_name'))
            res.append(updated_rs)
        else:
            # No dataset means "remove this value from the scenario".
            _delete_resourcescenario(scenario_id, rs.resource_attr_id)

    db.DBSession.flush()

    return res
def delete_resource_scenario(scenario_id, resource_attr_id, quiet=False, **kwargs):
    """Remove the value stored against one resource attribute in a scenario.

    :param quiet: when True, a missing resource scenario is ignored
        instead of raising an error
    """
    _check_can_edit_scenario(scenario_id, kwargs['user_id'])

    _delete_resourcescenario(scenario_id, resource_attr_id,
                             suppress_error=quiet)
def delete_resourcedata(scenario_id, resource_scenario, quiet=False, **kwargs):
    """Remove the data associated with a resource in a scenario.

    :param quiet: when True, a missing resource scenario is ignored
        instead of raising an error
    """
    _check_can_edit_scenario(scenario_id, kwargs['user_id'])

    _delete_resourcescenario(scenario_id, resource_scenario.resource_attr_id,
                             suppress_error=quiet)
def _update_resourcescenario(scenario, resource_scenario, dataset=None, new=False, user_id=None, source=None):
    """Insert or update the value of a resource attribute in a scenario.

    Finds (or creates) the ResourceScenario row, then either attaches a
    pre-inserted ``dataset`` directly or parses the value from
    ``resource_scenario.dataset`` and assigns it via ``assign_value``.

    :returns: the ResourceScenario ORM object, or None if the parsed
        value was None
    """
    if scenario is None:
        # NOTE(review): falls back to the scenario with id == 1 -- this
        # hard-coded default looks suspicious; confirm it is intended.
        scenario = db.DBSession.query(Scenario).filter(Scenario.id == 1).one()

    ra_id = resource_scenario.resource_attr_id

    log.debug("Assigning resource attribute: %s", ra_id)
    try:
        r_scen_i = db.DBSession.query(ResourceScenario).filter(
            ResourceScenario.scenario_id == scenario.id,
            ResourceScenario.resource_attr_id == ra_id).one()
    except NoResultFound as e:
        log.info("Creating new RS")
        r_scen_i = ResourceScenario()
        r_scen_i.resource_attr_id = resource_scenario.resource_attr_id
        r_scen_i.scenario_id = scenario.id
        r_scen_i.scenario = scenario

        db.DBSession.add(r_scen_i)

    if scenario.locked == 'Y':
        # Locked scenarios are returned unchanged rather than erroring.
        log.info("Scenario %s is locked", scenario.id)
        return r_scen_i

    if dataset is not None:
        # A pre-inserted dataset was supplied (bulk path): attach it
        # directly and skip value parsing.
        r_scen_i.dataset = dataset
        return r_scen_i

    dataset = resource_scenario.dataset

    value = dataset.parse_value()

    log.info("Assigning %s to resource attribute: %s", value, ra_id)

    if value is None:
        log.info("Cannot set data on resource attribute %s", ra_id)
        return None

    metadata = dataset.get_metadata_as_dict(source=source, user_id=user_id)
    data_unit_id = dataset.unit_id

    data_hash = dataset.get_hash(value, metadata)

    assign_value(r_scen_i, dataset.type.lower(), value, data_unit_id,
                 dataset.name, metadata=metadata, data_hash=data_hash,
                 user_id=user_id, source=source)

    return r_scen_i
def assign_value(rs, data_type, val, unit_id, name, metadata={}, data_hash=None, user_id=None, source=None):
    """Insert or update a piece of data in a scenario.

    If the dataset is shared by other resource scenarios, a new dataset
    is inserted. If it is used ONLY by the resource scenario in
    question, the existing dataset is updated in place to avoid
    unnecessary duplication.

    :raises PermissionError: if the owning scenario is locked
    """
    log.debug("Assigning value %s to rs %s in scenario %s",
              name, rs.resource_attr_id, rs.scenario_id)

    if rs.scenario.locked == 'Y':
        raise PermissionError("Cannot assign value. Scenario %s is locked" % (
            rs.scenario_id))

    # Decide whether the existing dataset can be updated in place.
    update_dataset = False
    if rs.dataset is not None:
        # The data hash acts as an identity check: unchanged data needs
        # no write at all.
        if rs.dataset.hash == data_hash:
            log.debug("Dataset has not changed. Returning.")
            return

        connected_rs = db.DBSession.query(ResourceScenario).filter(
            ResourceScenario.dataset_id == rs.dataset.id).all()
        # In-place update is only safe when this RS is the sole user of
        # the dataset (zero rows can occur before the link is flushed).
        if len(connected_rs) == 0:
            update_dataset = True
        if len(connected_rs) == 1:
            if connected_rs[0].scenario_id == rs.scenario_id \
                    and connected_rs[0].resource_attr_id == rs.resource_attr_id:
                update_dataset = True
        else:
            update_dataset = False

    if update_dataset is True:
        log.info("Updating dataset '%s'", name)
        dataset = data.update_dataset(rs.dataset.id, name, data_type, val,
                                      unit_id, metadata, flush=False,
                                      **dict(user_id=user_id))
        rs.dataset = dataset
        rs.dataset_id = dataset.id
        log.info("Set RS dataset id to %s" % dataset.id)
    else:
        log.info("Creating new dataset %s in scenario %s", name, rs.scenario_id)
        dataset = data.add_dataset(data_type, val, unit_id,
                                   metadata=metadata, name=name,
                                   **dict(user_id=user_id))
        rs.dataset = dataset
        rs.source = source

    db.DBSession.flush()
def add_data_to_attribute(scenario_id, resource_attr_id, dataset, **kwargs):
    """Add data to a resource scenario outside of a network update.

    Finds (or creates) the resource scenario for the given attribute in
    the scenario and assigns the parsed dataset value to it.

    :raises HydraError: if the dataset value parses to None
    :returns: the ResourceScenario ORM object
    """
    user_id = kwargs.get('user_id')

    _check_can_edit_scenario(scenario_id, user_id)

    scenario_i = _get_scenario(scenario_id, user_id)

    try:
        r_scen_i = db.DBSession.query(ResourceScenario).filter(
            ResourceScenario.scenario_id == scenario_id,
            ResourceScenario.resource_attr_id == resource_attr_id).one()
        log.info("Existing resource scenario found for %s in scenario %s",
                 resource_attr_id, scenario_id)
    except NoResultFound:
        log.info("No existing resource scenarios found for %s in scenario %s."
                 " Adding a new one.", resource_attr_id, scenario_id)
        r_scen_i = ResourceScenario()
        r_scen_i.scenario_id = scenario_id
        r_scen_i.resource_attr_id = resource_attr_id
        scenario_i.resourcescenarios.append(r_scen_i)

    data_type = dataset.type.lower()

    value = dataset.parse_value()

    dataset_metadata = dataset.get_metadata_as_dict(
        user_id=kwargs.get('user_id'), source=kwargs.get('source'))

    if value is None:
        # Fixed: the original passed dataset.id as a second positional
        # argument (logging style), so the '%s' was never interpolated
        # into the exception message.
        raise HydraError("Cannot set value to attribute. "
                         "No value was sent with dataset %s" % (dataset.id,))

    data_hash = dataset.get_hash(value, dataset_metadata)

    assign_value(r_scen_i, data_type, value, dataset.unit_id, dataset.name,
                 metadata=dataset_metadata, data_hash=data_hash,
                 user_id=user_id)

    db.DBSession.flush()
    return r_scen_i
def get_scenario_data(scenario_id, **kwargs):
    """
    Get all datasets used within a scenario.

    Datasets flagged as hidden are returned with their value and
    metadata blanked out unless the requesting user has read permission.

    Args:
        scenario_id: id of the scenario whose datasets to fetch.
        **kwargs: may contain 'user_id' for the permission check.

    Returns:
        A list of Dataset ORM objects (detached from the session).
    """
    user_id = kwargs.get('user_id')

    scenario_data = db.DBSession.query(Dataset).filter(
        Dataset.id == ResourceScenario.dataset_id,
        ResourceScenario.scenario_id == scenario_id).options(
            joinedload_all('metadata')).distinct().all()

    for sd in scenario_data:
        if sd.hidden == 'Y':
            try:
                sd.check_read_permission(user_id)
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit. Deliberately best-effort:
                # users without permission just see a blanked dataset.
                sd.value = None
                sd.metadata = []

    db.DBSession.expunge_all()

    log.info("Retrieved %s datasets", len(scenario_data))
    return scenario_data
def get_attribute_data(attr_ids, node_ids, **kwargs):
    """
    For the given attributes, return all matching node resource
    attributes and every resource scenario that references them.

    Hidden datasets have their value blanked unless the requesting user
    has read permission.

    Args:
        attr_ids: iterable of attribute ids to match.
        node_ids: iterable of node ids to match.
        **kwargs: may contain 'user_id' for the permission check.

    Returns:
        A tuple (node_attrs, resource_scenarios) of ORM objects; the
        resource scenarios are expunged from the session.
    """
    node_attrs = db.DBSession.query(ResourceAttr).options(
        joinedload_all('attr')).filter(
            ResourceAttr.node_id.in_(node_ids),
            ResourceAttr.attr_id.in_(attr_ids)).all()

    # Idiom: comprehension instead of a manual append loop.
    ra_ids = [ra.id for ra in node_attrs]

    resource_scenarios = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.resource_attr_id.in_(ra_ids)).options(
            joinedload('resourceattr')).options(
            joinedload_all('dataset.metadata')).order_by(
            ResourceScenario.scenario_id).all()

    for rs in resource_scenarios:
        if rs.dataset.hidden == 'Y':
            try:
                rs.dataset.check_read_permission(kwargs.get('user_id'))
            except Exception:
                # BUG FIX: was a bare `except:`. Best-effort hiding:
                # unauthorized users see the dataset with no value.
                rs.dataset.value = None
        db.DBSession.expunge(rs)

    return node_attrs, resource_scenarios
def get_resource_data(ref_key, ref_id, scenario_id, type_id=None,
                      expunge_session=True, **kwargs):
    """
    Get all resource scenarios for a given resource in a given scenario.

    If ``type_id`` is specified, only resource scenarios for attributes
    belonging to that type are returned. Hidden datasets have their
    value blanked unless the requesting user has read permission.

    Args:
        ref_key: resource kind ('NETWORK', 'NODE', 'LINK' or 'GROUP').
        ref_id: id of the resource (matched against all ref columns).
        scenario_id: id of the scenario to read from.
        type_id: optional template type id used to restrict attributes.
        expunge_session: when truthy, expunge everything from the
            session before returning.
        **kwargs: may contain 'user_id' for the permission check.

    Returns:
        A list of ResourceScenario ORM objects.
    """
    user_id = kwargs.get('user_id')

    resource_data_qry = db.DBSession.query(ResourceScenario).filter(
        ResourceScenario.dataset_id == Dataset.id,
        ResourceAttr.id == ResourceScenario.resource_attr_id,
        ResourceScenario.scenario_id == scenario_id,
        ResourceAttr.ref_key == ref_key,
        or_(
            ResourceAttr.network_id == ref_id,
            ResourceAttr.node_id == ref_id,
            ResourceAttr.link_id == ref_id,
            ResourceAttr.group_id == ref_id
        )).distinct().options(
            joinedload('resourceattr')).options(
            joinedload_all('dataset.metadata')).order_by(
            ResourceAttr.attr_is_var)

    if type_id is not None:
        # Restrict to the attributes defined on the requested type.
        type_attrs = db.DBSession.query(TypeAttr).filter(
            TypeAttr.type_id == type_id).all()
        attr_ids = [ta.attr_id for ta in type_attrs]
        resource_data_qry = resource_data_qry.filter(
            ResourceAttr.attr_id.in_(attr_ids))

    resource_data = resource_data_qry.all()

    for rs in resource_data:
        if rs.dataset.hidden == 'Y':
            try:
                rs.dataset.check_read_permission(user_id)
            except Exception:
                # BUG FIX: was a bare `except:`. Best-effort hiding:
                # unauthorized users see the dataset with no value.
                rs.dataset.value = None

    if expunge_session:
        db.DBSession.expunge_all()

    return resource_data
def get_resourcegroupitems(group_id, scenario_id, **kwargs):
    """
    Get all the items in a group within a scenario.

    If ``group_id`` is None, items across ALL groups in the scenario
    are returned.

    Args:
        group_id: id of the group to filter on, or None for no filter.
        scenario_id: id of the scenario to read from.

    Returns:
        A list of ResourceGroupItem ORM objects.
    """
    query = db.DBSession.query(ResourceGroupItem).filter(
        ResourceGroupItem.scenario_id == scenario_id)

    # Narrow to a single group only when one was requested.
    if group_id is not None:
        query = query.filter(ResourceGroupItem.group_id == group_id)

    return query.all()
def delete_resourcegroupitems(scenario_id, item_ids, **kwargs):
    """
    Delete the specified group items from a scenario.

    Args:
        scenario_id: id of the scenario containing the items.
        item_ids: iterable of ResourceGroupItem ids to delete.
        **kwargs: must contain 'user_id' for the scenario access check.
    """
    user_id = int(kwargs.get('user_id'))
    # Verifies the scenario exists and is accessible; return value unused.
    _get_scenario(scenario_id, user_id)

    for current_id in item_ids:
        item_i = db.DBSession.query(ResourceGroupItem).filter(
            ResourceGroupItem.id == current_id).one()
        db.DBSession.delete(item_i)

    db.DBSession.flush()
def empty_group(group_id, scenario_id, **kwargs):
    """
    Delete all items in a group within a scenario.

    Args:
        group_id: id of the group to empty.
        scenario_id: id of the scenario containing the items.
        **kwargs: must contain 'user_id' for the scenario access check.
    """
    user_id = int(kwargs.get('user_id'))
    # Verifies the scenario exists and is accessible; return value unused.
    _get_scenario(scenario_id, user_id)

    items = db.DBSession.query(ResourceGroupItem).filter(
        ResourceGroupItem.group_id == group_id).filter(
        ResourceGroupItem.scenario_id == scenario_id).all()

    # BUG FIX: the original called `.delete()` on the result of `.all()`,
    # but `.all()` returns a plain list which has no `.delete()` method,
    # so this function always raised AttributeError. Delete each row via
    # the session instead (matching delete_resourcegroupitems).
    for item_i in items:
        db.DBSession.delete(item_i)

    db.DBSession.flush()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.