idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
38,800 | def subnet_distance ( self ) : return [ ( Element . from_href ( entry . get ( 'subnet' ) ) , entry . get ( 'distance' ) ) for entry in self . data . get ( 'distance_entry' ) ] | Specific subnet administrative distances |
38,801 | def create ( cls , name , neighbor_as , neighbor_ip , neighbor_port = 179 , comment = None ) : json = { 'name' : name , 'neighbor_ip' : neighbor_ip , 'neighbor_port' : neighbor_port , 'comment' : comment } neighbor_as_ref = element_resolver ( neighbor_as ) json . update ( neighbor_as = neighbor_as_ref ) return ElementCreator ( cls , json ) | Create an external BGP Peer . |
38,802 | def create ( cls , name , connection_profile_ref = None , md5_password = None , local_as_option = 'not_set' , max_prefix_option = 'not_enabled' , send_community = 'no' , connected_check = 'disabled' , orf_option = 'disabled' , next_hop_self = True , override_capability = False , dont_capability_negotiate = False , remote_private_as = False , route_reflector_client = False , soft_reconfiguration = True , ttl_option = 'disabled' , comment = None ) : json = { 'name' : name , 'local_as_option' : local_as_option , 'max_prefix_option' : max_prefix_option , 'send_community' : send_community , 'connected_check' : connected_check , 'orf_option' : orf_option , 'next_hop_self' : next_hop_self , 'override_capability' : override_capability , 'dont_capability_negotiate' : dont_capability_negotiate , 'soft_reconfiguration' : soft_reconfiguration , 'remove_private_as' : remote_private_as , 'route_reflector_client' : route_reflector_client , 'ttl_option' : ttl_option , 'comment' : comment } if md5_password : json . update ( md5_password = md5_password ) connection_profile_ref = element_resolver ( connection_profile_ref ) or BGPConnectionProfile ( 'Default BGP Connection Profile' ) . href json . update ( connection_profile = connection_profile_ref ) return ElementCreator ( cls , json ) | Create a new BGPPeering configuration . |
38,803 | def create ( cls , name , md5_password = None , connect_retry = 120 , session_hold_timer = 180 , session_keep_alive = 60 ) : json = { 'name' : name , 'connect' : connect_retry , 'session_hold_timer' : session_hold_timer , 'session_keep_alive' : session_keep_alive } if md5_password : json . update ( md5_password = md5_password ) return ElementCreator ( cls , json ) | Create a new BGP Connection Profile . |
38,804 | def create ( self , name , sources = None , destinations = None , services = None , dynamic_src_nat = None , dynamic_src_nat_ports = ( 1024 , 65535 ) , static_src_nat = None , static_dst_nat = None , static_dst_nat_ports = None , is_disabled = False , used_on = None , add_pos = None , after = None , before = None , comment = None ) : rule_values = self . update_targets ( sources , destinations , services ) rule_values . update ( name = name , comment = comment ) rule_values . update ( is_disabled = is_disabled ) rule_values . update ( used_on = element_resolver ( used_on ) if used_on else used_on ) if dynamic_src_nat : nat = DynamicSourceNAT ( ) start_port , end_port = dynamic_src_nat_ports nat . update_field ( dynamic_src_nat , start_port = start_port , end_port = end_port ) rule_values . update ( options = nat ) elif static_src_nat : sources = rule_values [ 'sources' ] if 'any' in sources or 'none' in sources : raise InvalidRuleValue ( 'Source field cannot be none or any for ' 'static source NAT.' ) nat = StaticSourceNAT ( ) nat . update_field ( static_src_nat , original_value = sources . get ( 'src' ) [ 0 ] ) rule_values . update ( options = nat ) if static_dst_nat : destinations = rule_values [ 'destinations' ] if 'any' in destinations or 'none' in destinations : raise InvalidRuleValue ( 'Destination field cannot be none or any for ' 'destination NAT.' ) nat = StaticDestNAT ( ) original_port , translated_port = None , None if static_dst_nat_ports : original_port , translated_port = static_dst_nat_ports nat . update_field ( static_dst_nat , original_value = destinations . get ( 'dst' ) [ 0 ] , original_port = original_port , translated_port = translated_port ) rule_values . setdefault ( 'options' , { } ) . update ( nat ) if 'options' not in rule_values : rule_values . update ( options = LogOptions ( ) ) params = None href = self . href if add_pos is not None : href = self . add_at_position ( add_pos ) elif before or after : params = self . add_before_after ( before , after ) return ElementCreator ( self . __class__ , exception = CreateRuleFailed , href = href , params = params , json = rule_values ) | Create a NAT rule . |
38,805 | def request ( self , uri , method = 'GET' , body = None , headers = None , ** kwargs ) : _credential_refresh_attempt = kwargs . pop ( '_credential_refresh_attempt' , 0 ) request_headers = headers . copy ( ) if headers is not None else { } self . credentials . before_request ( self . _request , method , uri , request_headers ) body_stream_position = None if all ( getattr ( body , stream_prop , None ) for stream_prop in _STREAM_PROPERTIES ) : body_stream_position = body . tell ( ) response , content = self . http . request ( uri , method , body = body , headers = request_headers , ** kwargs ) if ( response . status in self . _refresh_status_codes and _credential_refresh_attempt < self . _max_refresh_attempts ) : _LOGGER . info ( 'Refreshing credentials due to a %s response. Attempt %s/%s.' , response . status , _credential_refresh_attempt + 1 , self . _max_refresh_attempts ) self . credentials . refresh ( self . _request ) if body_stream_position is not None : body . seek ( body_stream_position ) return self . request ( uri , method , body = body , headers = headers , _credential_refresh_attempt = _credential_refresh_attempt + 1 , ** kwargs ) return response , content | Implementation of httplib2 s Http . request . |
38,806 | def connect ( self , broker , port = 1883 , client_id = "" , clean_session = True ) : logger . info ( 'Connecting to %s at port %s' % ( broker , port ) ) self . _connected = False self . _unexpected_disconnect = False self . _mqttc = mqtt . Client ( client_id , clean_session ) self . _mqttc . on_connect = self . _on_connect self . _mqttc . on_disconnect = self . _on_disconnect if self . _username : self . _mqttc . username_pw_set ( self . _username , self . _password ) self . _mqttc . connect ( broker , int ( port ) ) timer_start = time . time ( ) while time . time ( ) < timer_start + self . _loop_timeout : if self . _connected or self . _unexpected_disconnect : break self . _mqttc . loop ( ) if self . _unexpected_disconnect : raise RuntimeError ( "The client disconnected unexpectedly" ) logger . debug ( 'client_id: %s' % self . _mqttc . _client_id ) return self . _mqttc | Connect to an MQTT broker . This is a pre - requisite step for publish and subscribe keywords . |
38,807 | def publish ( self , topic , message = None , qos = 0 , retain = False ) : logger . info ( 'Publish topic: %s, message: %s, qos: %s, retain: %s' % ( topic , message , qos , retain ) ) self . _mid = - 1 self . _mqttc . on_publish = self . _on_publish result , mid = self . _mqttc . publish ( topic , message , int ( qos ) , retain ) if result != 0 : raise RuntimeError ( 'Error publishing: %s' % result ) timer_start = time . time ( ) while time . time ( ) < timer_start + self . _loop_timeout : if mid == self . _mid : break self . _mqttc . loop ( ) if mid != self . _mid : logger . warn ( 'mid wasn\'t matched: %s' % mid ) | Publish a message to a topic with specified qos and retained flag . It is required that a connection has been established using Connect keyword before using this keyword . |
38,808 | def subscribe ( self , topic , qos , timeout = 1 , limit = 1 ) : seconds = convert_time ( timeout ) self . _messages [ topic ] = [ ] limit = int ( limit ) self . _subscribed = False logger . info ( 'Subscribing to topic: %s' % topic ) self . _mqttc . on_subscribe = self . _on_subscribe self . _mqttc . subscribe ( str ( topic ) , int ( qos ) ) self . _mqttc . on_message = self . _on_message_list if seconds == 0 : logger . info ( 'Starting background loop' ) self . _background_mqttc = self . _mqttc self . _background_mqttc . loop_start ( ) return self . _messages [ topic ] timer_start = time . time ( ) while time . time ( ) < timer_start + seconds : if limit == 0 or len ( self . _messages [ topic ] ) < limit : self . _mqttc . loop ( ) else : time . sleep ( 1 ) break return self . _messages [ topic ] | Subscribe to a topic and return a list of message payloads received within the specified time . |
38,809 | def listen ( self , topic , timeout = 1 , limit = 1 ) : if not self . _subscribed : logger . warn ( 'Cannot listen when not subscribed to a topic' ) return [ ] if topic not in self . _messages : logger . warn ( 'Cannot listen when not subscribed to topic: %s' % topic ) return [ ] if limit != 0 and len ( self . _messages [ topic ] ) >= limit : messages = self . _messages [ topic ] [ : ] self . _messages [ topic ] = [ ] return messages [ - limit : ] seconds = convert_time ( timeout ) limit = int ( limit ) logger . info ( 'Listening on topic: %s' % topic ) timer_start = time . time ( ) while time . time ( ) < timer_start + seconds : if limit == 0 or len ( self . _messages [ topic ] ) < limit : if self . _background_mqttc : time . sleep ( 1 ) else : self . _mqttc . loop ( ) else : time . sleep ( 1 ) break messages = self . _messages [ topic ] [ : ] self . _messages [ topic ] = [ ] return messages [ - limit : ] if limit != 0 else messages | Listen to a topic and return a list of message payloads received within the specified time . Requires an async Subscribe to have been called previously . |
38,810 | def subscribe_and_validate ( self , topic , qos , payload , timeout = 1 ) : seconds = convert_time ( timeout ) self . _verified = False logger . info ( 'Subscribing to topic: %s' % topic ) self . _mqttc . subscribe ( str ( topic ) , int ( qos ) ) self . _payload = str ( payload ) self . _mqttc . on_message = self . _on_message timer_start = time . time ( ) while time . time ( ) < timer_start + seconds : if self . _verified : break self . _mqttc . loop ( ) if not self . _verified : raise AssertionError ( "The expected payload didn't arrive in the topic" ) | Subscribe to a topic and validate that the specified payload is received within timeout . It is required that a connection has been established using Connect keyword . The payload can be specified as a python regular expression . If the specified payload is not received within timeout an AssertionError is thrown . |
38,811 | def unsubscribe ( self , topic ) : try : tmp = self . _mqttc except AttributeError : logger . info ( 'No MQTT Client instance found so nothing to unsubscribe from.' ) return if self . _background_mqttc : logger . info ( 'Closing background loop' ) self . _background_mqttc . loop_stop ( ) self . _background_mqttc = None if topic in self . _messages : del self . _messages [ topic ] logger . info ( 'Unsubscribing from topic: %s' % topic ) self . _unsubscribed = False self . _mqttc . on_unsubscribe = self . _on_unsubscribe self . _mqttc . unsubscribe ( str ( topic ) ) timer_start = time . time ( ) while ( not self . _unsubscribed and time . time ( ) < timer_start + self . _loop_timeout ) : self . _mqttc . loop ( ) if not self . _unsubscribed : logger . warn ( 'Client didn\'t receive an unsubscribe callback' ) | Unsubscribe the client from the specified topic . |
38,812 | def disconnect ( self ) : try : tmp = self . _mqttc except AttributeError : logger . info ( 'No MQTT Client instance found so nothing to disconnect from.' ) return self . _disconnected = False self . _unexpected_disconnect = False self . _mqttc . on_disconnect = self . _on_disconnect self . _mqttc . disconnect ( ) timer_start = time . time ( ) while time . time ( ) < timer_start + self . _loop_timeout : if self . _disconnected or self . _unexpected_disconnect : break self . _mqttc . loop ( ) if self . _unexpected_disconnect : raise RuntimeError ( "The client disconnected unexpectedly" ) | Disconnect from MQTT Broker . |
38,813 | def user_to_request ( handler ) : @ wraps ( handler ) async def decorator ( * args ) : request = _get_request ( args ) request [ cfg . REQUEST_USER_KEY ] = await get_cur_user ( request ) return await handler ( * args ) return decorator | Add user to request if user logged in |
38,814 | def add_synonym ( self , other ) : self . synonyms . extend ( other . synonyms ) other . synonyms = self . synonyms | Every word in a group of synonyms shares the same list . |
38,815 | def _split_dict ( dic ) : keys = sorted ( dic . keys ( ) ) return keys , [ dic [ k ] for k in keys ] | Split dict into sorted keys and values |
38,816 | def play ( seed = None ) : global _game from . game import Game from . prompt import install_words _game = Game ( seed ) load_advent_dat ( _game ) install_words ( _game ) _game . start ( ) print ( _game . output [ : - 1 ] ) | Turn the Python prompt into an Adventure game . |
38,817 | def write ( self , more ) : if more : self . output += str ( more ) . upper ( ) self . output += '\n' | Append the Unicode representation of s to our output . |
38,818 | def yesno ( self , s , yesno_callback , casual = False ) : self . write ( s ) self . yesno_callback = yesno_callback self . yesno_casual = casual | Ask a question and prepare to receive a yes - or - no answer . |
38,819 | def start2 ( self , yes ) : if yes : self . write_message ( 1 ) self . hints [ 3 ] . used = True self . lamp_turns = 1000 self . oldloc2 = self . oldloc = self . loc = self . rooms [ 1 ] self . dwarves = [ Dwarf ( self . rooms [ n ] ) for n in ( 19 , 27 , 33 , 44 , 64 ) ] self . pirate = Pirate ( self . chest_room ) treasures = self . treasures self . treasures_not_found = len ( treasures ) for treasure in treasures : treasure . prop = - 1 self . describe_location ( ) | Display instructions if the user wants them . |
38,820 | def do_command ( self , words ) : self . output = '' self . _do_command ( words ) return self . output | Parse and act upon the command in the list of strings words . |
38,821 | def resume ( self , obj ) : if isinstance ( obj , str ) : savefile = open ( obj , 'rb' ) else : savefile = obj game = pickle . loads ( zlib . decompress ( savefile . read ( ) ) ) if savefile is not obj : savefile . close ( ) game . random_generator = random . Random ( ) game . random_generator . setstate ( game . random_state ) del game . random_state return game | Returns an Adventure game saved to the given file . |
38,822 | def parse ( data , datafile ) : data . _last_travel = [ 0 , [ 0 ] ] while True : section_number = int ( datafile . readline ( ) ) if not section_number : break store = globals ( ) . get ( 'section%d' % section_number ) while True : fields = [ ( int ( field ) if field . lstrip ( '-' ) . isdigit ( ) else field ) for field in datafile . readline ( ) . strip ( ) . split ( '\t' ) ] if fields [ 0 ] == - 1 : break store ( data , * fields ) del data . _last_travel del data . _object data . object_list = sorted ( set ( data . objects . values ( ) ) , key = attrgetter ( 'n' ) ) for obj in data . object_list : name = obj . names [ 0 ] if hasattr ( data , name ) : name = name + '2' setattr ( data , name , obj ) return data | Read the Adventure data file and return a Data object . |
38,823 | def _map_smtp_headers_to_api_parameters ( self , email_message ) : api_data = [ ] for smtp_key , api_transformer in six . iteritems ( self . _headers_map ) : data_to_transform = email_message . extra_headers . pop ( smtp_key , None ) if data_to_transform is not None : if isinstance ( data_to_transform , ( list , tuple ) ) : for data in data_to_transform : api_data . append ( ( api_transformer [ 0 ] , api_transformer [ 1 ] ( data ) ) ) elif isinstance ( data_to_transform , dict ) : for data in six . iteritems ( data_to_transform ) : api_data . append ( api_transformer ( data ) ) else : api_data . append ( ( api_transformer [ 0 ] , api_transformer [ 1 ] ( data_to_transform ) ) ) return api_data | Map the values passed in SMTP headers to API - ready 2 - item tuples present in HEADERS_MAP |
38,824 | def student_view ( self , context = None ) : context = context or { } context . update ( { 'display_name' : self . display_name , 'image_url' : self . image_url , 'thumbnail_url' : self . thumbnail_url or self . image_url , 'description' : self . description , 'xblock_id' : text_type ( self . scope_ids . usage_id ) , 'alt_text' : self . alt_text or self . display_name , } ) fragment = self . build_fragment ( template = 'view.html' , context = context , css = [ 'view.less.css' , URL_FONT_AWESOME_CSS , ] , js = [ 'draggabilly.pkgd.js' , 'view.js' , ] , js_init = 'ImageModalView' , ) return fragment | Build the fragment for the default student view |
38,825 | def build_fragment ( self , template = '' , context = None , css = None , js = None , js_init = None , ) : template = 'templates/' + template context = context or { } css = css or [ ] js = js or [ ] rendered_template = '' if template : rendered_template = self . loader . render_django_template ( template , context = Context ( context ) , i18n_service = self . runtime . service ( self , 'i18n' ) , ) fragment = Fragment ( rendered_template ) for item in css : if item . startswith ( '/' ) : url = item else : item = 'public/' + item url = self . runtime . local_resource_url ( self , item ) fragment . add_css_url ( url ) for item in js : item = 'public/' + item url = self . runtime . local_resource_url ( self , item ) fragment . add_javascript_url ( url ) if js_init : fragment . initialize_js ( js_init ) return fragment | Creates a fragment for display . |
38,826 | def _parse_title ( file_path ) : title = file_path title = title . split ( '/' ) [ - 1 ] title = '.' . join ( title . split ( '.' ) [ : - 1 ] ) title = ' ' . join ( title . split ( '-' ) ) title = ' ' . join ( [ word . capitalize ( ) for word in title . split ( ' ' ) ] ) return title | Parse a title from a file name |
38,827 | def _read_files ( files ) : file_contents = [ ( _parse_title ( file_path ) , _read_file ( file_path ) , ) for file_path in files ] return file_contents | Read the contents of a list of files |
38,828 | def _find_files ( directory ) : pattern = "{directory}/*.xml" . format ( directory = directory , ) files = glob ( pattern ) return files | Find XML files in the directory |
38,829 | def workbench_scenarios ( cls ) : module = cls . __module__ module = module . split ( '.' ) [ 0 ] directory = pkg_resources . resource_filename ( module , 'scenarios' ) files = _find_files ( directory ) scenarios = _read_files ( files ) return scenarios | Gather scenarios to be displayed in the workbench |
38,830 | def _check_response_code ( response , codes ) : if type ( codes ) == int : codes = [ codes ] if response . status_code not in codes : raise FireCloudServerError ( response . status_code , response . content ) | Throws an exception if the http response is not expected . Can check single integer or list of valid responses . |
38,831 | def list_entity_types ( namespace , workspace ) : headers = _fiss_agent_header ( { "Content-type" : "application/json" } ) uri = "workspaces/{0}/{1}/entities" . format ( namespace , workspace ) return __get ( uri , headers = headers ) | List the entity types present in a workspace . |
38,832 | def upload_entities ( namespace , workspace , entity_data ) : body = urlencode ( { "entities" : entity_data } ) headers = _fiss_agent_header ( { 'Content-type' : "application/x-www-form-urlencoded" } ) uri = "workspaces/{0}/{1}/importEntities" . format ( namespace , workspace ) return __post ( uri , headers = headers , data = body ) | Upload entities from tab - delimited string . |
38,833 | def upload_entities_tsv ( namespace , workspace , entities_tsv ) : if isinstance ( entities_tsv , string_types ) : with open ( entities_tsv , "r" ) as tsv : entity_data = tsv . read ( ) elif isinstance ( entities_tsv , io . StringIO ) : entity_data = entities_tsv . getvalue ( ) else : raise ValueError ( 'Unsupported input type.' ) return upload_entities ( namespace , workspace , entity_data ) | Upload entities from a tsv loadfile . |
38,834 | def copy_entities ( from_namespace , from_workspace , to_namespace , to_workspace , etype , enames , link_existing_entities = False ) : uri = "workspaces/{0}/{1}/entities/copy" . format ( to_namespace , to_workspace ) body = { "sourceWorkspace" : { "namespace" : from_namespace , "name" : from_workspace } , "entityType" : etype , "entityNames" : enames } return __post ( uri , json = body , params = { 'linkExistingEntities' : str ( link_existing_entities ) . lower ( ) } ) | Copy entities between workspaces |
38,835 | def get_entities ( namespace , workspace , etype ) : uri = "workspaces/{0}/{1}/entities/{2}" . format ( namespace , workspace , etype ) return __get ( uri ) | List entities of given type in a workspace . |
38,836 | def get_entities_tsv ( namespace , workspace , etype ) : uri = "workspaces/{0}/{1}/entities/{2}/tsv" . format ( namespace , workspace , etype ) return __get ( uri ) | List entities of given type in a workspace as a TSV . |
38,837 | def get_entity ( namespace , workspace , etype , ename ) : uri = "workspaces/{0}/{1}/entities/{2}/{3}" . format ( namespace , workspace , etype , ename ) return __get ( uri ) | Request entity information . |
38,838 | def get_entities_query ( namespace , workspace , etype , page = 1 , page_size = 100 , sort_direction = "asc" , filter_terms = None ) : params = { "page" : page , "pageSize" : page_size , "sortDirection" : sort_direction } if filter_terms : params [ 'filterTerms' ] = filter_terms uri = "workspaces/{0}/{1}/entityQuery/{2}" . format ( namespace , workspace , etype ) return __get ( uri , params = params ) | Paginated version of get_entities_with_type . |
38,839 | def update_entity ( namespace , workspace , etype , ename , updates ) : headers = _fiss_agent_header ( { "Content-type" : "application/json" } ) uri = "{0}workspaces/{1}/{2}/entities/{3}/{4}" . format ( fcconfig . root_url , namespace , workspace , etype , ename ) return __SESSION . patch ( uri , headers = headers , json = updates ) | Update entity attributes in a workspace . |
38,840 | def list_workspace_configs ( namespace , workspace , allRepos = False ) : uri = "workspaces/{0}/{1}/methodconfigs" . format ( namespace , workspace ) return __get ( uri , params = { 'allRepos' : allRepos } ) | List method configurations in workspace . |
38,841 | def create_workspace_config ( namespace , workspace , body ) : uri = "workspaces/{0}/{1}/methodconfigs" . format ( namespace , workspace ) return __post ( uri , json = body ) | Create method configuration in workspace . |
38,842 | def delete_workspace_config ( namespace , workspace , cnamespace , config ) : uri = "workspaces/{0}/{1}/method_configs/{2}/{3}" . format ( namespace , workspace , cnamespace , config ) return __delete ( uri ) | Delete method configuration in workspace . |
38,843 | def get_workspace_config ( namespace , workspace , cnamespace , config ) : uri = "workspaces/{0}/{1}/method_configs/{2}/{3}" . format ( namespace , workspace , cnamespace , config ) return __get ( uri ) | Get method configuration in workspace . |
38,844 | def overwrite_workspace_config ( namespace , workspace , cnamespace , configname , body ) : headers = _fiss_agent_header ( { "Content-type" : "application/json" } ) uri = "workspaces/{0}/{1}/method_configs/{2}/{3}" . format ( namespace , workspace , cnamespace , configname ) return __put ( uri , headers = headers , json = body ) | Add or overwrite method configuration in workspace . |
38,845 | def update_workspace_config ( namespace , workspace , cnamespace , configname , body ) : uri = "workspaces/{0}/{1}/method_configs/{2}/{3}" . format ( namespace , workspace , cnamespace , configname ) return __post ( uri , json = body ) | Update method configuration in workspace . |
38,846 | def validate_config ( namespace , workspace , cnamespace , config ) : uri = "workspaces/{0}/{1}/method_configs/{2}/{3}/validate" . format ( namespace , workspace , cnamespace , config ) return __get ( uri ) | Get syntax validation for a configuration . |
38,847 | def rename_workspace_config ( namespace , workspace , cnamespace , config , new_namespace , new_name ) : body = { "namespace" : new_namespace , "name" : new_name , "workspaceName" : { "namespace" : namespace , "name" : workspace } } uri = "workspaces/{0}/{1}/method_configs/{2}/{3}/rename" . format ( namespace , workspace , cnamespace , config ) return __post ( uri , json = body ) | Rename a method configuration in a workspace . |
38,848 | def copy_config_from_repo ( namespace , workspace , from_cnamespace , from_config , from_snapshot_id , to_cnamespace , to_config ) : body = { "configurationNamespace" : from_cnamespace , "configurationName" : from_config , "configurationSnapshotId" : from_snapshot_id , "destinationNamespace" : to_cnamespace , "destinationName" : to_config } uri = "workspaces/{0}/{1}/method_configs/copyFromMethodRepo" . format ( namespace , workspace ) return __post ( uri , json = body ) | Copy a method config from the methods repository to a workspace . |
38,849 | def copy_config_to_repo ( namespace , workspace , from_cnamespace , from_config , to_cnamespace , to_config ) : body = { "configurationNamespace" : to_cnamespace , "configurationName" : to_config , "sourceNamespace" : from_cnamespace , "sourceName" : from_config } uri = "workspaces/{0}/{1}/method_configs/copyToMethodRepo" . format ( namespace , workspace ) return __post ( uri , json = body ) | Copy a method config from a workspace to the methods repository . |
38,850 | def get_config_template ( namespace , method , version ) : body = { "methodNamespace" : namespace , "methodName" : method , "methodVersion" : int ( version ) } return __post ( "template" , json = body ) | Get the configuration template for a method . |
38,851 | def get_inputs_outputs ( namespace , method , snapshot_id ) : body = { "methodNamespace" : namespace , "methodName" : method , "methodVersion" : snapshot_id } return __post ( "inputsOutputs" , json = body ) | Get a description of the inputs and outputs for a method . |
38,852 | def get_repository_config ( namespace , config , snapshot_id ) : uri = "configurations/{0}/{1}/{2}" . format ( namespace , config , snapshot_id ) return __get ( uri ) | Get a method configuration from the methods repository . |
38,853 | def get_repository_method ( namespace , method , snapshot_id , wdl_only = False ) : uri = "methods/{0}/{1}/{2}?onlyPayload={3}" . format ( namespace , method , snapshot_id , str ( wdl_only ) . lower ( ) ) return __get ( uri ) | Get a method definition from the method repository . |
38,854 | def delete_repository_method ( namespace , name , snapshot_id ) : uri = "methods/{0}/{1}/{2}" . format ( namespace , name , snapshot_id ) return __delete ( uri ) | Redacts a method and all of its associated configurations . |
38,855 | def delete_repository_config ( namespace , name , snapshot_id ) : uri = "configurations/{0}/{1}/{2}" . format ( namespace , name , snapshot_id ) return __delete ( uri ) | Redacts a configuration and all of its associated configurations . |
38,856 | def get_repository_method_acl ( namespace , method , snapshot_id ) : uri = "methods/{0}/{1}/{2}/permissions" . format ( namespace , method , snapshot_id ) return __get ( uri ) | Get permissions for a method . |
38,857 | def update_repository_method_acl ( namespace , method , snapshot_id , acl_updates ) : uri = "methods/{0}/{1}/{2}/permissions" . format ( namespace , method , snapshot_id ) return __post ( uri , json = acl_updates ) | Set method permissions . |
38,858 | def get_repository_config_acl ( namespace , config , snapshot_id ) : uri = "configurations/{0}/{1}/{2}/permissions" . format ( namespace , config , snapshot_id ) return __get ( uri ) | Get configuration permissions . |
38,859 | def update_repository_config_acl ( namespace , config , snapshot_id , acl_updates ) : uri = "configurations/{0}/{1}/{2}/permissions" . format ( namespace , config , snapshot_id ) return __post ( uri , json = acl_updates ) | Set configuration permissions . |
38,860 | def create_submission ( wnamespace , workspace , cnamespace , config , entity , etype , expression = None , use_callcache = True ) : uri = "workspaces/{0}/{1}/submissions" . format ( wnamespace , workspace ) body = { "methodConfigurationNamespace" : cnamespace , "methodConfigurationName" : config , "entityType" : etype , "entityName" : entity , "useCallCache" : use_callcache } if expression : body [ 'expression' ] = expression return __post ( uri , json = body ) | Submit job in FireCloud workspace . |
38,861 | def abort_submission ( namespace , workspace , submission_id ) : uri = "workspaces/{0}/{1}/submissions/{2}" . format ( namespace , workspace , submission_id ) return __delete ( uri ) | Abort running job in a workspace . |
38,862 | def get_submission ( namespace , workspace , submission_id ) : uri = "workspaces/{0}/{1}/submissions/{2}" . format ( namespace , workspace , submission_id ) return __get ( uri ) | Request submission information . |
38,863 | def get_workflow_metadata ( namespace , workspace , submission_id , workflow_id ) : uri = "workspaces/{0}/{1}/submissions/{2}/workflows/{3}" . format ( namespace , workspace , submission_id , workflow_id ) return __get ( uri ) | Request the metadata for a workflow in a submission . |
38,864 | def get_workflow_outputs ( namespace , workspace , submission_id , workflow_id ) : uri = "workspaces/{0}/{1}/" . format ( namespace , workspace ) uri += "submissions/{0}/workflows/{1}/outputs" . format ( submission_id , workflow_id ) return __get ( uri ) | Request the outputs for a workflow in a submission . |
38,865 | def create_workspace ( namespace , name , authorizationDomain = "" , attributes = None ) : if not attributes : attributes = dict ( ) body = { "namespace" : namespace , "name" : name , "attributes" : attributes } if authorizationDomain : authDomain = [ { "membersGroupName" : authorizationDomain } ] else : authDomain = [ ] body [ "authorizationDomain" ] = authDomain return __post ( "workspaces" , json = body ) | Create a new FireCloud Workspace . |
38,866 | def update_workspace_acl ( namespace , workspace , acl_updates , invite_users_not_found = False ) : uri = "{0}workspaces/{1}/{2}/acl?inviteUsersNotFound={3}" . format ( fcconfig . root_url , namespace , workspace , str ( invite_users_not_found ) . lower ( ) ) headers = _fiss_agent_header ( { "Content-type" : "application/json" } ) return __SESSION . patch ( uri , headers = headers , data = json . dumps ( acl_updates ) ) | Update workspace access control list . |
38,867 | def clone_workspace ( from_namespace , from_workspace , to_namespace , to_workspace , authorizationDomain = "" ) : if authorizationDomain : if isinstance ( authorizationDomain , string_types ) : authDomain = [ { "membersGroupName" : authorizationDomain } ] else : authDomain = [ { "membersGroupName" : authDomain } for authDomain in authorizationDomain ] else : authDomain = [ ] body = { "namespace" : to_namespace , "name" : to_workspace , "attributes" : dict ( ) , "authorizationDomain" : authDomain , } uri = "workspaces/{0}/{1}/clone" . format ( from_namespace , from_workspace ) return __post ( uri , json = body ) | Clone a FireCloud workspace . |
38,868 | def update_workspace_attributes ( namespace , workspace , attrs ) : headers = _fiss_agent_header ( { "Content-type" : "application/json" } ) uri = "{0}workspaces/{1}/{2}/updateAttributes" . format ( fcconfig . root_url , namespace , workspace ) body = json . dumps ( attrs ) return __SESSION . patch ( uri , headers = headers , data = body ) | Update or remove workspace attributes . |
38,869 | def add_user_to_group ( group , role , email ) : uri = "groups/{0}/{1}/{2}" . format ( group , role , email ) return __put ( uri ) | Add a user to a group the caller owns |
38,870 | def remove_user_from_group ( group , role , email ) : uri = "groups/{0}/{1}/{2}" . format ( group , role , email ) return __delete ( uri ) | Remove a user from a group the caller owns |
38,871 | def register_commands ( self , subparsers ) : prsr = subparsers . add_parser ( 'upload' , description = 'Copy the file or directory into the given' ) prsr . add_argument ( 'workspace' , help = 'Workspace name' ) prsr . add_argument ( 'source' , help = 'File or directory to upload' ) prsr . add_argument ( '-s' , '--show' , action = 'store_true' , help = "Show the gsutil command, but don't run it" ) dest_help = 'Destination relative to the bucket root. ' dest_help += 'If omitted the file will be placed in the root directory' prsr . add_argument ( '-d' , '--destination' , help = dest_help ) prsr . set_defaults ( func = upload ) | Add commands to a list of subparsers . This will be called by Fissfc to add additional command targets from this plugin . |
38,872 | def new ( namespace , name , wdl , synopsis , documentation = None , api_url = fapi . PROD_API_ROOT ) : r = fapi . update_workflow ( namespace , name , synopsis , wdl , documentation , api_url ) fapi . _check_response_code ( r , 201 ) d = r . json ( ) return Method ( namespace , name , d [ "snapshotId" ] ) | Create new FireCloud method . |
38,873 | def template ( self ) : r = fapi . get_config_template ( self . namespace , self . name , self . snapshot_id , self . api_url ) fapi . _check_response_code ( r , 200 ) return r . json ( ) | Return a method template for this method . |
38,874 | def inputs_outputs ( self ) : r = fapi . get_inputs_outputs ( self . namespace , self . name , self . snapshot_id , self . api_url ) fapi . _check_response_code ( r , 200 ) return r . json ( ) | Get information on method inputs & outputs . |
38,875 | def acl ( self ) : r = fapi . get_repository_method_acl ( self . namespace , self . name , self . snapshot_id , self . api_url ) fapi . _check_response_code ( r , 200 ) return r . json ( ) | Get the access control list for this method . |
38,876 | def set_acl ( self , role , users ) : acl_updates = [ { "user" : user , "role" : role } for user in users ] r = fapi . update_repository_method_acl ( self . namespace , self . name , self . snapshot_id , acl_updates , self . api_url ) fapi . _check_response_code ( r , 200 ) | Set permissions for this method . |
38,877 | def new ( namespace , name , protected = False , attributes = dict ( ) , api_url = fapi . PROD_API_ROOT ) : r = fapi . create_workspace ( namespace , name , protected , attributes , api_url ) fapi . _check_response_code ( r , 201 ) return Workspace ( namespace , name , api_url ) | Create a new FireCloud workspace . |
38,878 | def refresh ( self ) : r = fapi . get_workspace ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 200 ) self . data = r . json ( ) return self | Reload workspace metadata from firecloud . |
38,879 | def delete ( self ) : r = fapi . delete_workspace ( self . namespace , self . name ) fapi . _check_response_code ( r , 202 ) | Delete the workspace from FireCloud . |
38,880 | def lock ( self ) : r = fapi . lock_workspace ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 204 ) self . data [ 'workspace' ] [ 'isLocked' ] = True return self | Lock this Workspace . |
38,881 | def unlock ( self ) : r = fapi . unlock_workspace ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 204 ) self . data [ 'workspace' ] [ 'isLocked' ] = False return self | Unlock this Workspace . |
38,882 | def update_attribute ( self , attr , value ) : update = [ fapi . _attr_up ( attr , value ) ] r = fapi . update_workspace_attributes ( self . namespace , self . name , update , self . api_url ) fapi . _check_response_code ( r , 200 ) | Set the value of a workspace attribute . |
38,883 | def remove_attribute ( self , attr ) : update = [ fapi . _attr_rem ( attr ) ] r = fapi . update_workspace_attributes ( self . namespace , self . name , update , self . api_url ) self . data [ "workspace" ] [ "attributes" ] . pop ( attr , None ) fapi . _check_response_code ( r , 200 ) | Remove attribute from a workspace . |
38,884 | def import_tsv ( self , tsv_file ) : r = fapi . upload_entities_tsv ( self . namespace , self . name , self . tsv_file , self . api_url ) fapi . _check_response_code ( r , 201 ) | Upload entity data to workspace from tsv loadfile . |
38,885 | def get_entity ( self , etype , entity_id ) : r = fapi . get_entity ( self . namespace , self . name , etype , entity_id , self . api_url ) fapi . _check_response_code ( r , 200 ) dresp = r . json ( ) return Entity ( etype , entity_id , dresp [ 'attributes' ] ) | Return entity in this workspace . |
38,886 | def delete_entity ( self , etype , entity_id ) : r = fapi . delete_entity ( self . namespace , self . name , etype , entity_id , self . api_url ) fapi . _check_response_code ( r , 202 ) | Delete an entity in this workspace . |
38,887 | def import_entities ( self , entities ) : edata = Entity . create_payload ( entities ) r = fapi . upload_entities ( self . namespace , self . name , edata , self . api_url ) fapi . _check_response_code ( r , 201 ) | Upload entity objects . |
38,888 | def create_set ( self , set_id , etype , entities ) : if etype not in { "sample" , "pair" , "participant" } : raise ValueError ( "Unsupported entity type:" + str ( etype ) ) payload = "membership:" + etype + "_set_id\t" + etype + "_id\n" for e in entities : if e . etype != etype : msg = "Entity type '" + e . etype + "' does not match " msg += "set type '" + etype + "'" raise ValueError ( msg ) payload += set_id + '\t' + e . entity_id + '\n' r = fapi . upload_entities ( self . namespace , self . name , payload , self . api_url ) fapi . _check_response_code ( r , 201 ) | Create a set of entities and upload to FireCloud . |
38,889 | def submissions ( self ) : r = fapi . get_submissions ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 200 ) return r . json ( ) | List job submissions in workspace . |
38,890 | def entity_types ( self ) : r = fapi . get_entity_types ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 200 ) return r . json ( ) . keys ( ) | List entity types in workspace . |
38,891 | def entities ( self ) : r = fapi . get_entities_with_type ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 200 ) edicts = r . json ( ) return [ Entity ( e [ 'entityType' ] , e [ 'name' ] , e [ 'attributes' ] ) for e in edicts ] | List all entities in workspace . |
38,892 | def __get_entities ( self , etype ) : r = fapi . get_entities ( self . namespace , self . name , etype , self . api_url ) fapi . _check_response_code ( r , 200 ) return [ Entity ( e [ 'entityType' ] , e [ 'name' ] , e [ 'attributes' ] ) for e in r . json ( ) ] | Helper to get entities for a given type . |
38,893 | def copy_entities ( self , from_namespace , from_workspace , etype , enames ) : r = fapi . copy_entities ( from_namespace , from_workspace , self . namespace , self . name , etype , enames , self . api_url ) fapi . _check_response_code ( r , 201 ) | Copy entities from another workspace . |
38,894 | def configs ( self ) : raise NotImplementedError r = fapi . get_configs ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 200 ) cdata = r . json ( ) configs = [ ] for c in cdata : cnamespace = c [ 'namespace' ] cname = c [ 'name' ] root_etype = c [ 'rootEntityType' ] method_namespace = c [ 'methodRepoMethod' ] [ 'methodNamespace' ] method_name = c [ 'methodRepoMethod' ] [ 'methodName' ] method_version = c [ 'methodRepoMethod' ] [ 'methodVersion' ] | Get method configurations in a workspace . |
38,895 | def acl ( self ) : r = fapi . get_workspace_acl ( self . namespace , self . name , self . api_url ) fapi . _check_response_code ( r , 200 ) return r . json ( ) | Get the access control list for this workspace . |
38,896 | def clone ( self , to_namespace , to_name ) : r = fapi . clone_workspace ( self . namespace , self . name , to_namespace , to_name , self . api_url ) fapi . _check_response_code ( r , 201 ) return Workspace ( to_namespace , to_name , self . api_url ) | Clone this workspace . |
38,897 | def supervise ( project , workspace , namespace , workflow , sample_sets , recovery_file ) : logging . info ( "Initializing FireCloud Supervisor..." ) logging . info ( "Saving recovery checkpoints to " + recovery_file ) args = { 'project' : project , 'workspace' : workspace , 'namespace' : namespace , 'workflow' : workflow , 'sample_sets' : sample_sets } monitor_data , dependencies = init_supervisor_data ( workflow , sample_sets ) recovery_data = { 'args' : args , 'monitor_data' : monitor_data , 'dependencies' : dependencies } supervise_until_complete ( monitor_data , dependencies , args , recovery_file ) | Supervise submission of jobs from a Firehose - style workflow of workflows |
38,898 | def validate_monitor_tasks ( dependencies , args ) : sup_configs = sorted ( dependencies . keys ( ) ) try : logging . info ( "Validating supervisor data..." ) r = fapi . list_workspace_configs ( args [ 'project' ] , args [ 'workspace' ] ) fapi . _check_response_code ( r , 200 ) space_configs = r . json ( ) space_configs = { c [ "name" ] : c for c in space_configs } r = fapi . list_repository_methods ( ) fapi . _check_response_code ( r , 200 ) repo_methods = r . json ( ) repo_methods = { m [ 'namespace' ] + '/' + m [ 'name' ] + ':' + str ( m [ 'snapshotId' ] ) for m in repo_methods if m [ 'entityType' ] == 'Workflow' } valid = True for config in sup_configs : if config not in space_configs : logging . error ( "No task configuration for " + config + " found in " + args [ 'project' ] + "/" + args [ 'workspace' ] ) valid = False else : m = space_configs [ config ] [ 'methodRepoMethod' ] ref_method = m [ 'methodNamespace' ] + "/" + m [ 'methodName' ] + ":" + str ( m [ 'methodVersion' ] ) if ref_method not in repo_methods : logging . error ( config + " -- You don't have permisson to run the referenced method: " + ref_method ) valid = False except Exception as e : logging . error ( "Exception occurred while validating supervisor: " + str ( e ) ) raise return False return valid | Validate that all entries in the supervisor are valid task configurations and that all permissions requirements are satisfied . |
38,899 | def recover_and_supervise ( recovery_file ) : try : logging . info ( "Attempting to recover Supervisor data from " + recovery_file ) with open ( recovery_file ) as rf : recovery_data = json . load ( rf ) monitor_data = recovery_data [ 'monitor_data' ] dependencies = recovery_data [ 'dependencies' ] args = recovery_data [ 'args' ] except : logging . error ( "Could not recover monitor data, exiting..." ) return 1 logging . info ( "Data successfully loaded, resuming Supervisor" ) supervise_until_complete ( monitor_data , dependencies , args , recovery_file ) | Retrieve monitor data from recovery_file and resume monitoring |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.