idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
43,900
def hash(self):
    """Return a string uniquely identifying this entry within a date.

    Entries that produce the same hash can be regrouped together.
    """
    parts = [self.alias, self.description, str(self.ignored), str(self.flags)]
    return u''.join(parts)
Return a value that's used to uniquely identify an entry in a date so we can regroup all entries that share the same hash.
43,901
def add_flag(self, flag):
    """Add `flag` to this entry's flags and record the change.

    Recording the changed attribute lets the textual representation be
    regenerated for it on output.
    """
    super(Entry, self).add_flag(flag)
    self._changed_attrs.add('flags')
Add flag to the flags and memorize that this attribute has changed so we can regenerate it when outputting text.
43,902
def remove_flag(self, flag):
    """Remove `flag` from this entry's flags and record the change.

    Recording the changed attribute lets the textual representation be
    regenerated for it on output.
    """
    super(Entry, self).remove_flag(flag)
    self._changed_attrs.add('flags')
Remove flag from the flags and memorize that this attribute has changed so we can regenerate it when outputting text.
43,903
def add_entry(self, date, entry):
    """Insert `entry` into the textual representation under `date`.

    Scans for the matching DateLine, then advances past any existing
    entries of that date so the new entry lands after them. When the
    line at the insertion point is not itself an Entry, a blank line is
    inserted between it and the new entry.
    """
    found_date = False
    position = 0
    for lineno, line in enumerate(self.lines):
        if isinstance(line, DateLine) and line.date == date:
            found_date = True
            position = lineno
            continue
        if found_date:
            if isinstance(line, Entry):
                position = lineno
            elif isinstance(line, DateLine):
                # Next date section starts: stop scanning.
                break
    self.lines.insert(position + 1, entry)
    if not isinstance(self.lines[position], Entry):
        self.lines.insert(position + 1, TextLine(''))
Add the given entry to the textual representation .
43,904
def delete_entries(self, entries):
    """Drop the given Entry lines from the textual representation."""
    kept = [
        line for line in self.lines
        if not isinstance(line, Entry) or line not in entries
    ]
    self.lines = trim(kept)
Remove the given entries from the textual representation .
43,905
def delete_date(self, date):
    """Remove the DateLine for `date`; entry lines are left untouched."""
    remaining = [
        line for line in self.lines
        if not isinstance(line, DateLine) or line.date != date
    ]
    self.lines = trim(remaining)
Remove the date line from the textual representation. This doesn't remove any entry line.
43,906
def add_date(self, date):
    """Insert `date` into the textual representation via the parser."""
    self.lines = self.parser.add_date(date, self.lines)
Add the given date to the textual representation .
43,907
def append(self, x):
    """Append `x` and mirror it into the entries collection, if any."""
    super(EntriesList, self).append(x)
    collection = self.entries_collection
    if collection is not None:
        collection.add_entry(self.date, x)
Append the given element to the list and synchronize the textual representation .
43,908
async def _request(self, path):
    """Perform a GET against the camera and return the parsed response.

    Returns parsed JSON for JSON responses, True/False for plain-text
    replies (whether they contain "Ok"), or None if the body was never
    read. On timeout or client error, marks the client unavailable and
    returns None.
    """
    url = '{}{}'.format(self.base_url, path)
    data = None
    try:
        async with self.websession.get(
                url, auth=self._auth, timeout=self._timeout) as response:
            if response.status == 200:
                if response.headers['content-type'] == 'application/json':
                    data = await response.json()
                else:
                    data = await response.text()
    except (asyncio.TimeoutError, aiohttp.ClientError) as error:
        _LOGGER.error('Failed to communicate with IP Webcam: %s', error)
        self._available = False
        return
    self._available = True
    if isinstance(data, str):
        return data.find("Ok") != -1
    return data
Make the actual request and return the parsed response .
43,909
async def update(self):
    """Fetch the latest status and sensor data from the IP Webcam.

    Each payload is only stored when the request returned something
    truthy, so stale data survives a failed poll.
    """
    status = await self._request('/status.json?show_avail=1')
    if status:
        self.status_data = status
    sensors = await self._request('/sensors.json')
    if sensors:
        self.sensor_data = sensors
Fetch the latest data from IP Webcam .
43,910
def current_settings(self):
    """Return a dict of the camera's current configuration values.

    Numeric strings become floats and 'on'/'off' become booleans; any
    other value is kept as-is. Empty when no status data is loaded.
    """
    settings = {}
    if not self.status_data:
        return settings
    for key, raw in self.status_data.get('curvals', {}).items():
        try:
            parsed = float(raw)
        except ValueError:
            parsed = raw
        if parsed in ('on', 'off'):
            parsed = parsed == 'on'
        settings[key] = parsed
    return settings
Return a dict with all current configuration values.
43,911
def available_settings(self):
    """Return a dict mapping each setting to its list of allowed values.

    Values are normalized the same way as `current_settings`: numeric
    strings become floats and 'on'/'off' become booleans. Returns an
    empty dict when no status data has been fetched yet.
    """
    available = {}
    if not self.status_data:
        return available
    for key, values in self.status_data.get('avail', {}).items():
        normalized = []
        for subval in values:
            try:
                subval = float(subval)
            except ValueError:
                pass
            # Bug fix: the on/off check must inspect the individual value
            # (subval), not the whole list (val) — the original condition
            # could never match, so 'on'/'off' stayed strings here while
            # current_settings converted them to booleans.
            if subval in ('on', 'off'):
                subval = subval == 'on'
            normalized.append(subval)
        available[key] = normalized
    return available
Return dict of lists with all available config settings .
43,912
def change_setting(self, key, val):
    """Send a settings change request; booleans map to 'on'/'off'."""
    payload = ('on' if val else 'off') if isinstance(val, bool) else val
    return self._request('/settings/{}?set={}'.format(key, payload))
Change a setting .
43,913
def set_orientation(self, orientation='landscape'):
    """Set the video orientation; returns False for unknown values."""
    if orientation in ALLOWED_ORIENTATIONS:
        return self.change_setting('orientation', orientation)
    _LOGGER.debug('%s is not a valid orientation', orientation)
    return False
Set the video orientation .
43,914
def set_scenemode(self, scenemode='auto'):
    """Set the video scene mode; returns False for unsupported modes."""
    if scenemode in self.available_settings['scenemode']:
        return self.change_setting('scenemode', scenemode)
    _LOGGER.debug('%s is not a valid scenemode', scenemode)
    return False
Set the video scene mode .
43,915
def months_ago(date, nb_months=1):
    """Return `date` moved back `nb_months` months, pinned to day 1."""
    years_back, months_back = divmod(nb_months, 12)
    new_month = date.month - months_back
    if new_month <= 0:
        # Crossed a year boundary: wrap the month and borrow a year.
        new_month += 12
        years_back += 1
    return date.replace(day=1, month=new_month, year=date.year - years_back)
Return the given date with nb_months subtracted from it.
43,916
def list_(ctx, search_string, reverse, backend, used, inactive):
    """List configured aliases, or the full mapping when reversed.

    Aliases shown in red belong to inactive projects; pushing entries
    to them will likely fail.
    """
    if reverse:
        show_mapping(ctx, search_string, backend)
    else:
        list_aliases(ctx, search_string, backend, used, inactive=inactive)
List configured aliases . Aliases in red belong to inactive projects and trying to push entries to these aliases will probably result in an error .
43,917
def add(ctx, alias, mapping, backend):
    """Add a new alias to the configuration file.

    When several backends are configured, the --backend option must
    disambiguate which one the alias belongs to.
    """
    if not backend:
        configured = ctx.obj['settings'].get_backends()
        if len(configured) > 1:
            raise click.UsageError(
                "You're using more than 1 backend. Please set the backend to "
                "add the alias to with the --backend option (choices are %s)"
                % ", ".join(dict(configured).keys())
            )
    add_mapping(ctx, alias, mapping, backend)
Add a new alias to your configuration file .
43,918
def convert_to_4(self):
    """Migrate a pre-4.0 configuration file to the 4.0 layout.

    Builds a `backends` section from the old site/username/password
    options, then moves the old wrmap/shared_wrmap alias sections into
    default_aliases/default_shared_aliases.
    """
    from six.moves.urllib import parse

    if not self.config.has_section('backends'):
        self.config.add_section('backends')
    site = parse.urlparse(self.get('site', default_value=''))
    backend_uri = 'zebra://{username}:{password}@{hostname}'.format(
        username=self.get('username', default_value=''),
        # Quote the password so reserved URI characters survive.
        password=parse.quote(self.get('password', default_value=''), safe=''),
        hostname=site.hostname,
    )
    self.config.set('backends', 'default', backend_uri)
    for obsolete in ('username', 'password', 'site'):
        self.config.remove_option('default', obsolete)
    for section in ('default_aliases', 'default_shared_aliases'):
        if not self.config.has_section(section):
            self.config.add_section(section)
    for old_section, new_section in (
            ('wrmap', 'default_aliases'),
            ('shared_wrmap', 'default_shared_aliases')):
        if self.config.has_section(old_section):
            for alias, mapping in self.config.items(old_section):
                self.config.set(new_section, alias, mapping)
            self.config.remove_section(old_section)
Convert a pre - 4 . 0 configuration file to a 4 . 0 configuration file .
43,919
def autofill(ctx, f):
    """Prefill the timesheet up to month end for the auto_fill_days."""
    auto_fill_days = ctx.obj['settings']['auto_fill_days']
    if not auto_fill_days:
        # NOTE(review): the double '.view.view' attribute chain is kept
        # from the original — confirm it is intentional.
        ctx.obj['view'].view.err(
            "The parameter `auto_fill_days` must be set "
            "to use this command."
        )
        return
    today = datetime.date.today()
    last_day = calendar.monthrange(today.year, today.month)
    last_date = datetime.date(today.year, today.month, last_day[1])
    collection = get_timesheet_collection_for_context(ctx, f)
    timesheet = collection.latest()
    timesheet.prefill(auto_fill_days, last_date)
    timesheet.save()
    ctx.obj['view'].msg("Your entries file has been filled.")
Fills your timesheet up to today for the defined auto_fill_days .
43,920
def alias(ctx, search, backend):
    """Search for a project and interactively create an alias for it."""
    projects = ctx.obj['projects_db'].search(search, active_only=True)
    projects = sorted(projects, key=lambda project: project.name)
    if len(projects) == 0:
        ctx.obj['view'].msg(
            "No active project matches your search string '%s'."
            % ''.join(search)
        )
        return
    ctx.obj['view'].projects_list(projects, True)
    try:
        choice = ctx.obj['view'].select_project(projects)
    except CancelException:
        return
    project = projects[choice]
    ctx.obj['view'].project_with_activities(project, numbered_activities=True)
    try:
        choice = ctx.obj['view'].select_activity(project.activities)
    except CancelException:
        return
    retry = True
    while retry:
        try:
            alias = ctx.obj['view'].select_alias()
        except CancelException:
            return
        if alias in aliases_database:
            mapping = aliases_database[alias]
            overwrite = ctx.obj['view'].overwrite_alias(alias, mapping)
            if not overwrite:
                return
            elif overwrite:
                retry = False
            else:
                # Kept from the original; unreachable when overwrite is
                # strictly truthy/falsy.
                retry = True
        else:
            retry = False
    activity = project.activities[choice]
    mapping = Mapping(mapping=(project.id, activity.id),
                      backend=project.backend)
    ctx.obj['settings'].add_alias(alias, mapping)
    ctx.obj['settings'].write_config()
    ctx.obj['view'].alias_added(alias, (project.id, activity.id))
Searches for the given project and interactively add an alias for it .
43,921
def show(ctx, project_id, backend):
    """Show the details of the project with the given id."""
    try:
        project = ctx.obj['projects_db'].get(project_id, backend)
    except IOError:
        raise Exception(
            "Error: the projects database file doesn't exist. "
            "Please run `taxi update` to create it"
        )
    if project is not None:
        ctx.obj['view'].project_with_activities(project)
    else:
        ctx.obj['view'].err("Could not find project `%s`" % (project_id))
Shows the details of the given project id .
43,922
def stop(ctx, description, f):
    """Record the end of the current activity, with an optional note."""
    description = ' '.join(description)
    try:
        timesheet_collection = get_timesheet_collection_for_context(ctx, f)
        current_timesheet = timesheet_collection.latest()
        current_timesheet.continue_entry(
            datetime.date.today(),
            datetime.datetime.now().time(),
            description,
        )
    # All three error kinds were handled identically; merged into one
    # handler (behavior unchanged).
    except (ParseError, NoActivityInProgressError, StopInThePastError) as e:
        ctx.obj['view'].err(e)
    else:
        current_timesheet.save()
Use it when you stop working on the current task. You can add a description of what you've done.
43,923
def start(ctx, alias, description, f):
    """Record the start of an activity in the entries file.

    If today's last entry already has an end time, the new entry starts
    at that time; otherwise it starts now. Pair with the `stop` command.
    """
    today = datetime.date.today()
    try:
        timesheet_collection = get_timesheet_collection_for_context(ctx, f)
    except ParseError as e:
        ctx.obj['view'].err(e)
        return
    t = timesheet_collection.latest()
    today_entries = t.entries.filter(date=today)
    continues_previous = (
        today in today_entries and today_entries[today]
        and isinstance(today_entries[today][-1].duration, tuple)
        and today_entries[today][-1].duration[1] is not None
    )
    if continues_previous:
        new_entry_start_time = today_entries[today][-1].duration[1]
    else:
        new_entry_start_time = datetime.datetime.now()
    description = ' '.join(description) if description else '?'
    duration = (new_entry_start_time, None)
    t.entries[today].append(Entry(alias, duration, description))
    t.save()
Use it when you start working on the given activity. This will add the activity and the current time to your entries file. When you're finished, use the stop command.
43,924
def to_text(self, line):
    """Return the textual representation of `line`.

    Dispatches to the transformer method registered for the line's
    class in ENTRY_TRANSFORMERS.
    """
    transformer_name = self.ENTRY_TRANSFORMERS[line.__class__]
    return getattr(self, transformer_name)(line)
Return the textual representation of the given line .
43,925
def update(ctx):
    """Synchronize the project database and refresh shared aliases.

    Fetches projects from every configured backend, rebuilds the shared
    aliases, removes local aliases whose mapping no longer matches the
    shared one, and writes the updated configuration.
    """
    ctx.obj['view'].updating_projects_database()
    projects = []
    for backend_name, backend_uri in ctx.obj['settings'].get_backends():
        backend = plugins_registry.get_backend(backend_name)
        backend_projects = backend.get_projects()
        # Tag every project with the backend it came from.
        for project in backend_projects:
            project.backend = backend_name
        projects += backend_projects
    ctx.obj['projects_db'].update(projects)
    shared_aliases = {}
    backends_to_clear = set()
    for project in projects:
        for alias, activity_id in six.iteritems(project.aliases):
            shared_aliases[alias] = Mapping(
                mapping=(project.id, activity_id), backend=project.backend)
            backends_to_clear.add(project.backend)
    for backend in backends_to_clear:
        ctx.obj['settings'].clear_shared_aliases(backend)
    current_aliases = ctx.obj['settings'].get_aliases()
    # Aliases whose shared mapping changed (same backend, different
    # project/activity) are dropped locally.
    removed_aliases = [
        (alias, mapping) for alias, mapping in current_aliases.items()
        if (alias in shared_aliases
            and shared_aliases[alias].backend == mapping.backend
            and mapping.mapping[:2] != shared_aliases[alias].mapping[:2])
    ]
    if removed_aliases:
        ctx.obj['settings'].remove_aliases(removed_aliases)
    for alias, mapping in shared_aliases.items():
        ctx.obj['settings'].add_shared_alias(alias, mapping)
    aliases_after_update = ctx.obj['settings'].get_aliases()
    ctx.obj['settings'].write_config()
    ctx.obj['view'].projects_database_update_success(
        aliases_after_update, ctx.obj['projects_db'])
Synchronizes your project database with the server and updates the shared aliases .
43,926
def attributes(self):
    """Map attribute names to BiomartAttribute instances.

    Fetches the attribute pages lazily on first access, then merges the
    attributes of every page into one dictionary.
    """
    if not self._attribute_pages:
        self.fetch_attributes()
    merged = {}
    for page in self._attribute_pages.values():
        merged.update(page.attributes)
    return merged
A dictionary mapping names of attributes to BiomartAttribute instances .
43,927
def get_backends_by_class(self, backend_class):
    """Return registered backends that are instances of `backend_class`."""
    matches = []
    for backend in self._backends_registry.values():
        if isinstance(backend, backend_class):
            matches.append(backend)
    return matches
Return a list of backends that are instances of the given backend_class .
43,928
def _load_backend ( self , backend_uri , context ) : parsed = parse . urlparse ( backend_uri ) options = dict ( parse . parse_qsl ( parsed . query ) ) try : backend = self . _entry_points [ self . BACKENDS_ENTRY_POINT ] [ parsed . scheme ] . load ( ) except KeyError : raise BackendNotFoundError ( "The requested backend `%s` could not be found in the " "registered entry points. Perhaps you forgot to install the " "corresponding backend package?" % parsed . scheme ) password = ( parse . unquote ( parsed . password ) if parsed . password else parsed . password ) return backend ( username = parsed . username , password = password , hostname = parsed . hostname , port = parsed . port , path = parsed . path , options = options , context = context , )
Return the instantiated backend object identified by the given backend_uri .
43,929
def register_commands(self):
    """Load all custom-command entry points so they register themselves."""
    commands = self._entry_points[self.COMMANDS_ENTRY_POINT].values()
    for command in commands:
        command.load()
Load entry points for custom commands .
43,930
def _add_or_remove_flag ( self , flag , add ) : meth = self . add_flag if add else self . remove_flag meth ( flag )
Add the given flag if add is True remove it otherwise .
43,931
def get_application_configuration(name):
    """Return the named application configuration.

    Raises:
        ValueError: if no configuration with that name exists.
    """
    _check()
    config = _ec.get_application_configuration(name)
    if config is False:
        raise ValueError(
            "Application configuration {0} not found.".format(name))
    return config
Get a named application configuration .
43,932
def _submit(primitive, port_index, tuple_):
    """Internal: submit `tuple_` on the given output port of `primitive`."""
    _ec._submit((_get_opc(primitive), port_index, tuple_))
Internal method to submit a tuple
43,933
def value(self, value):
    """Set the metric's current value (coerced to int)."""
    _ec.metric_set((self.__ptr, int(value)))
Set the current value of the metric .
43,934
def resource_tags(self):
    """Resource tags for this processing logic.

    Returns the mutable tag set stored on the operator placement,
    creating it on first access. Falls back to an empty frozenset when
    no placement information is available.
    """
    try:
        placement = self._op()._placement
        if 'resourceTags' not in placement:
            placement['resourceTags'] = set()
        return placement['resourceTags']
    except TypeError:
        # Placement is not a mapping (e.g. None): no tags to expose.
        return frozenset()
Resource tags for this processing logic .
43,935
def _vcap_from_service_definition(service_def):
    """Wrap a single service definition in a VCAP-services dict."""
    credentials = (service_def['credentials']
                   if 'credentials' in service_def else service_def)
    service = {
        'credentials': credentials,
        'name': _name_from_service_definition(service_def),
    }
    return {'streaming-analytics': [service]}
Turn a service definition into a vcap services containing a single service .
43,936
def _get_java_env(self):
    """Pass the VCAP services through the environment to the Java submission."""
    env = super(_StreamingAnalyticsSubmitter, self)._get_java_env()
    vcap = streamsx.rest._get_vcap_services(self._vcap_services)
    env['VCAP_SERVICES'] = json.dumps(vcap)
    return env
Pass the VCAP through the environment to the java submission
43,937
# Build the environment for the Java submission step of a distributed
# submit: start from the parent-class environment, then overlay Streams
# connection details when an explicit StreamsConnection is configured.
# A REST delegator clears the domain/instance ids; otherwise the first
# domain's id is exported, and REST URL plus credentials are exported
# unless a service definition is already present in the config.
# NOTE(review): this definition was flattened to one line by extraction;
# the nesting of the SERVICE_DEFINITION block relative to the isinstance()
# else-branch cannot be recovered from the tokens alone — confirm against
# upstream before reformatting.
def _get_java_env ( self ) : "Set env vars from connection if set" env = super ( _DistributedSubmitter , self ) . _get_java_env ( ) if self . _streams_connection is not None : sc = self . _streams_connection if isinstance ( sc . _delegator , streamsx . rest_primitives . _StreamsRestDelegator ) : env . pop ( 'STREAMS_DOMAIN_ID' , None ) env . pop ( 'STREAMS_INSTANCE_ID' , None ) else : env [ 'STREAMS_DOMAIN_ID' ] = sc . get_domains ( ) [ 0 ] . id if not ConfigParams . SERVICE_DEFINITION in self . _config ( ) : env [ 'STREAMS_REST_URL' ] = sc . resource_url env [ 'STREAMS_USERNAME' ] = sc . session . auth [ 0 ] env [ 'STREAMS_PASSWORD' ] = sc . session . auth [ 1 ] return env
Set env vars from connection if set
43,938
# Build a JobConfig from a full job-configuration-overlays object: the
# first overlay's jobConfig fields (jobName, jobGroup, preload, data
# directory, tracing, submission parameters) are popped into JobConfig
# attributes; a manual-fusion deploymentConfig yields target_pe_count;
# whatever remains of the overlay is kept as raw_overlay.
# NOTE(review): this definition was flattened to one line by extraction;
# the nesting of the trailing `if jco:` guard cannot be recovered from
# the tokens alone — confirm against upstream before reformatting.
def from_overlays ( overlays ) : jc = JobConfig ( ) jc . comment = overlays . get ( 'comment' ) if 'jobConfigOverlays' in overlays : if len ( overlays [ 'jobConfigOverlays' ] ) >= 1 : jco = copy . deepcopy ( overlays [ 'jobConfigOverlays' ] [ 0 ] ) if 'jobConfig' in jco : _jc = jco [ 'jobConfig' ] jc . job_name = _jc . pop ( 'jobName' , None ) jc . job_group = _jc . pop ( 'jobGroup' , None ) jc . preload = _jc . pop ( 'preloadApplicationBundles' , False ) jc . data_directory = _jc . pop ( 'dataDirectory' , None ) jc . tracing = _jc . pop ( 'tracing' , None ) for sp in _jc . pop ( 'submissionParameters' , [ ] ) : jc . submission_parameters [ sp [ 'name' ] ] = sp [ 'value' ] if not _jc : del jco [ 'jobConfig' ] if 'deploymentConfig' in jco : _dc = jco [ 'deploymentConfig' ] if 'manual' == _dc . get ( 'fusionScheme' ) : if 'fusionTargetPeCount' in _dc : jc . target_pe_count = _dc . pop ( 'fusionTargetPeCount' ) if len ( _dc ) == 1 : del jco [ 'deploymentConfig' ] if jco : jc . raw_overlay = jco return jc
Create a JobConfig instance from a full job configuration overlays object .
43,939
def _add_overlays ( self , config ) : if self . _comment : config [ 'comment' ] = self . _comment jco = { } config [ "jobConfigOverlays" ] = [ jco ] if self . _raw_overlay : jco . update ( self . _raw_overlay ) jc = jco . get ( 'jobConfig' , { } ) if self . job_name is not None : jc [ "jobName" ] = self . job_name if self . job_group is not None : jc [ "jobGroup" ] = self . job_group if self . data_directory is not None : jc [ "dataDirectory" ] = self . data_directory if self . preload : jc [ 'preloadApplicationBundles' ] = True if self . tracing is not None : jc [ 'tracing' ] = self . tracing if self . submission_parameters : sp = jc . get ( 'submissionParameters' , [ ] ) for name in self . submission_parameters : sp . append ( { 'name' : str ( name ) , 'value' : self . submission_parameters [ name ] } ) jc [ 'submissionParameters' ] = sp if jc : jco [ "jobConfig" ] = jc if self . target_pe_count is not None and self . target_pe_count >= 1 : deployment = jco . get ( 'deploymentConfig' , { } ) deployment . update ( { 'fusionScheme' : 'manual' , 'fusionTargetPeCount' : self . target_pe_count } ) jco [ "deploymentConfig" ] = deployment return config
Add this as a jobConfigOverlays JSON to config .
43,940
def job(self):
    """REST Job binding for the submitted build, or None if unavailable."""
    submitter = self._submitter
    if submitter and hasattr(submitter, '_job_access'):
        return submitter._job_access()
    return None
REST binding for the job associated with the submitted build .
43,941
# Display an ipywidgets button (in a notebook) that cancels the submitted
# job when clicked. Does nothing when no job was submitted (no `jobId`
# attribute). The click handler disables the button, cancels via the REST
# job binding, and reports success or "already cancelled"; on failure it
# re-enables the button and re-raises. The outermost bare `except: pass`
# makes the whole feature best-effort when ipywidgets/display are absent.
# NOTE(review): this definition was flattened to one line by extraction;
# the nesting of the decorated closure and the try/except blocks cannot
# be safely reconstructed from the tokens — confirm against upstream
# before reformatting.
def cancel_job_button ( self , description = None ) : if not hasattr ( self , 'jobId' ) : return try : import ipywidgets as widgets if not description : description = 'Cancel job: ' description += self . name if hasattr ( self , 'name' ) else self . job . name button = widgets . Button ( description = description , button_style = 'danger' , layout = widgets . Layout ( width = '40%' ) ) out = widgets . Output ( ) vb = widgets . VBox ( [ button , out ] ) @ out . capture ( clear_output = True ) def _cancel_job_click ( b ) : b . disabled = True print ( 'Cancelling job: id=' + str ( self . job . id ) + ' ...\n' , flush = True ) try : rc = self . job . cancel ( ) out . clear_output ( ) if rc : print ( 'Cancelled job: id=' + str ( self . job . id ) + ' : ' + self . job . name + '\n' , flush = True ) else : print ( 'Job already cancelled: id=' + str ( self . job . id ) + ' : ' + self . job . name + '\n' , flush = True ) except : b . disabled = False out . clear_output ( ) raise button . on_click ( _cancel_job_click ) display ( vb ) except : pass
Display a button that will cancel the submitted job .
43,942
def add_default(self, key: str, value: Optional[str], default_type: type = str) -> None:
    """Register a default value and default type for `key`."""
    self.defaults[key] = HierarkeyDefault(value, default_type)
Adds a default value and a default type for a key .
43,943
def add_type(self, type: type, serialize: Callable[[Any], str], unserialize: Callable[[str], Any]) -> None:
    """Register (de)serialization callables for a new value type."""
    entry = HierarkeyType(type=type, serialize=serialize,
                          unserialize=unserialize)
    self.types.append(entry)
Adds serialization support for a new type .
43,944
# Decorator factory: attaches the global key-value store of this hierarchy
# to a plain (non-model) class inheriting GlobalSettingsBase. It creates a
# dedicated Django model for the store, registers it on the decorated
# class's module, wires a cached HierarkeyProxy property under
# `self.attribute_name`, and records the class as `self.global_class`.
# Passing a class directly (decorator used without parentheses) raises
# ImproperlyConfigured.
# NOTE(review): this definition was flattened to one line by extraction;
# the nesting of the inner `wrapper`, `init` and `prop` closures cannot be
# safely reconstructed from the tokens — confirm against upstream before
# reformatting.
def set_global ( self , cache_namespace : str = None ) -> type : if isinstance ( cache_namespace , type ) : raise ImproperlyConfigured ( 'Incorrect decorator usage, you need to use .add_global() ' 'instead of .add_global' ) def wrapper ( wrapped_class ) : if issubclass ( wrapped_class , models . Model ) : raise ImproperlyConfigured ( 'Hierarkey.add_global() can only be invoked on a normal class, ' 'not on a Django model.' ) if not issubclass ( wrapped_class , GlobalSettingsBase ) : raise ImproperlyConfigured ( 'You should use .add_global() on a class that inherits from ' 'GlobalSettingsBase.' ) _cache_namespace = cache_namespace or ( '%s_%s' % ( wrapped_class . __name__ , self . attribute_name ) ) attrs = self . _create_attrs ( wrapped_class ) model_name = '%s_%sStore' % ( wrapped_class . __name__ , self . attribute_name . title ( ) ) if getattr ( sys . modules [ wrapped_class . __module__ ] , model_name , None ) : return wrapped_class kv_model = self . _create_model ( model_name , attrs ) def init ( self , * args , object = None , ** kwargs ) : super ( kv_model , self ) . __init__ ( * args , ** kwargs ) setattr ( kv_model , '__init__' , init ) hierarkey = self def prop ( iself ) : from . proxy import HierarkeyProxy attrname = '_hierarkey_proxy_{}_{}' . format ( _cache_namespace , self . attribute_name ) cached = getattr ( iself , attrname , None ) if not cached : cached = HierarkeyProxy . _new ( iself , type = kv_model , hierarkey = hierarkey , cache_namespace = _cache_namespace ) setattr ( iself , attrname , cached ) return cached setattr ( sys . modules [ wrapped_class . __module__ ] , model_name , kv_model ) setattr ( wrapped_class , '_%s_objects' % self . attribute_name , kv_model . objects ) setattr ( wrapped_class , self . attribute_name , property ( prop ) ) self . global_class = wrapped_class return wrapped_class return wrapper
Decorator . Attaches the global key - value store of this hierarchy to an object .
43,945
# Decorator factory: attaches a key-value store to a Django model. Creates
# a store model with a ForeignKey back to the decorated model, registers it
# on the model's module, and wires a cached HierarkeyProxy property whose
# parent is resolved from `parent_field` (falling back to the registered
# global class when the parent is missing).
# NOTE(review): this definition was flattened to one line by extraction;
# the nesting of the inner `wrapper` and `prop` closures cannot be safely
# reconstructed from the tokens — confirm against upstream before
# reformatting.
def add ( self , cache_namespace : str = None , parent_field : str = None ) -> type : if isinstance ( cache_namespace , type ) : raise ImproperlyConfigured ( 'Incorrect decorator usage, you need to use .add() instead of .add' ) def wrapper ( model ) : if not issubclass ( model , models . Model ) : raise ImproperlyConfigured ( 'Hierarkey.add() can only be invoked on a Django model' ) _cache_namespace = cache_namespace or ( '%s_%s' % ( model . __name__ , self . attribute_name ) ) attrs = self . _create_attrs ( model ) attrs [ 'object' ] = models . ForeignKey ( model , related_name = '_%s_objects' % self . attribute_name , on_delete = models . CASCADE ) model_name = '%s_%sStore' % ( model . __name__ , self . attribute_name . title ( ) ) kv_model = self . _create_model ( model_name , attrs ) setattr ( sys . modules [ model . __module__ ] , model_name , kv_model ) hierarkey = self def prop ( iself ) : from . proxy import HierarkeyProxy attrname = '_hierarkey_proxy_{}_{}' . format ( _cache_namespace , self . attribute_name ) cached = getattr ( iself , attrname , None ) if not cached : try : parent = getattr ( iself , parent_field ) if parent_field else None except models . ObjectDoesNotExist : parent = None if not parent and hierarkey . global_class : parent = hierarkey . global_class ( ) cached = HierarkeyProxy . _new ( iself , type = kv_model , hierarkey = hierarkey , parent = parent , cache_namespace = _cache_namespace ) setattr ( iself , attrname , cached ) return cached setattr ( model , self . attribute_name , property ( prop ) ) return model return wrapper
Decorator . Attaches a global key - value store to a Django model .
43,946
def _as_spl_expr(value):
    """Convert `value` to an SPL expression when needed, else return it.

    Values already exposing `spl_json` pass through untouched; Enum
    members become expressions of their name; numpy values are handled
    by the streamsx numpy bridge.
    """
    import streamsx._streams._numpy
    if hasattr(value, 'spl_json'):
        return value
    if isinstance(value, Enum):
        value = streamsx.spl.op.Expression.expression(value.name)
    npcnv = streamsx._streams._numpy.as_spl_expr(value)
    return npcnv if npcnv is not None else value
Return value converted to an SPL expression if needed, otherwise value unchanged.
43,947
def _requested_name ( self , name , action = None , func = None ) : if name is not None : if name in self . _used_names : n = 2 while True : pn = name + '_' + str ( n ) if pn not in self . _used_names : self . _used_names . add ( pn ) return pn n += 1 else : self . _used_names . add ( name ) return name if func is not None : if hasattr ( func , '__name__' ) : name = func . __name__ if name == '<lambda>' : name = action + '_lambda' elif hasattr ( func , '__class__' ) : name = func . __class__ . __name__ if name is None : if action is not None : name = action else : name = self . name return self . _requested_name ( name )
Create a unique name for an operator or a stream .
43,948
def colocate(self, others, why):
    """Tag this operator and `others` so they share a colocation group.

    Markers carry no placement, so colocation is skipped for them.
    """
    if isinstance(self, Marker):
        return
    tag = '__spl_' + why + '$' + str(self.index)
    self._colocate_tag(tag)
    for other in others:
        other._colocate_tag(tag)
Colocate this operator with another .
43,949
def expression(value):
    """Create an SPL expression from `value`.

    Existing Expressions are copied; objects exposing `spl_json` are
    rebuilt from their JSON form; anything else becomes a raw
    'splexpr' wrapping the value.
    """
    if isinstance(value, Expression):
        return Expression(value._type, value._value)
    if hasattr(value, 'spl_json'):
        json_form = value.spl_json()
        return Expression(json_form['type'], json_form['value'])
    return Expression('splexpr', value)
Create an SPL expression .
43,950
def spl_json(self):
    """Private method. May be removed at any time."""
    return {"type": self._type, "value": self._value}
Private method . May be removed at any time .
43,951
# Prepare or check the toolkit's info.xml: when the file is missing, it is
# created from _INFO_XML_TEMPLATE with the project name derived from the
# project directory; when present, its <resources>/<messageSet> section is
# inspected to verify the TopologySplpyResource message set covers exactly
# the copied language resources, exiting with an error message otherwise.
# NOTE(review): this block is corrupted by extraction — the statements
# `errstr = + _INFO_XML_TEMPLATE` are missing their left-hand string
# operand, and the source is split mid-expression (`f .` / `close ( )`)
# across the two lines. Kept byte-identical; restore from upstream before
# any reformatting.
def _setup_info_xml ( self , languageList ) : infoXmlFile = os . path . join ( self . _tk_dir , 'info.xml' ) print ( 'Check info.xml:' , infoXmlFile ) try : TopologySplpyResourceMessageSetFound = False TopologySplpyResourceLanguages = [ ] tree = ET . parse ( infoXmlFile ) root = tree . getroot ( ) for resources in root . findall ( '{http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo}resources' ) : if self . _cmd_args . verbose : print ( 'Resource: ' , resources . tag ) for messageSet in resources . findall ( '{http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo}messageSet' ) : if self . _cmd_args . verbose : print ( 'Message set:' , messageSet . tag , messageSet . attrib ) if 'name' in messageSet . attrib : if messageSet . attrib [ 'name' ] == 'TopologySplpyResource' : TopologySplpyResourceMessageSetFound = True for lang in messageSet . findall ( '{http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo}lang' ) : language = os . path . dirname ( lang . text ) TopologySplpyResourceLanguages . append ( language ) if TopologySplpyResourceMessageSetFound : TopologySplpyResourceLanguages . sort ( ) languageList . sort ( ) copiedLanguagesSet = set ( languageList ) resourceLanguageSet = set ( TopologySplpyResourceLanguages ) if self . _cmd_args . verbose : print ( 'copied language resources:\n' , languageList ) print ( 'TopologySplpyResource from info.xml:\n' , TopologySplpyResourceLanguages ) if copiedLanguagesSet == resourceLanguageSet : print ( 'Resource section of info.xml verified' ) else : errstr = + _INFO_XML_TEMPLATE sys . exit ( errstr ) else : errstr = + _INFO_XML_TEMPLATE sys . exit ( errstr ) except FileNotFoundError as e : print ( "WARNING: File info.xml not found. Creating info.xml from template" ) projectRootDir = os . path . abspath ( self . _tk_dir ) projectName = os . path . basename ( projectRootDir ) infoXml = _INFO_XML_TEMPLATE . replace ( '__SPLPY_TOOLKIT_NAME__' , projectName ) f = open ( infoXmlFile , 'w' ) f . write ( infoXml ) f . 
close ( ) except SystemExit as e : raise e except : errstr = + _INFO_XML_TEMPLATE sys . exit ( errstr )
Set up the info.xml file. This function prepares or checks the info.xml file in the project directory: if info.xml does not exist there, it copies the template info.xml into the project directory, deriving the project name from the project directory name. If an info.xml file exists, its resource section is inspected; if that section has no valid message-set description for the TopologySplpyResource, a warning message is printed.
43,952
def main(args=None):
    """Print diagnostic information about streamsx and the environment.

    Useful for support: reports the streamsx version and install
    location (pip package vs. toolkit copy), the Python version, and
    the relevant environment variables. Always returns 0.
    """
    _parse_args(args)
    streamsx._streams._version._mismatch_check('streamsx.topology.context')
    srp = pkg_resources.working_set.find(
        pkg_resources.Requirement.parse('streamsx'))
    if srp is not None:
        # Installed as a pip package.
        srv = srp.parsed_version
        location = srp.location
        spkg = 'package'
    else:
        # Fall back to the module's own version/location; detect whether
        # it is the copy shipped inside the topology toolkit.
        srv = streamsx._streams._version.__version__
        location = os.path.dirname(streamsx._streams._version.__file__)
        location = os.path.dirname(location)
        location = os.path.dirname(location)
        tk_path = os.path.join(
            'com.ibm.streamsx.topology', 'opt', 'python', 'packages')
        spkg = 'toolkit' if location.endswith(tk_path) else 'unknown'
    print('streamsx==' + str(srv) + ' (' + spkg + ')')
    print(' location: ' + str(location))
    print('Python version:' + str(sys.version))
    print('PYTHONHOME=' + str(os.environ.get('PYTHONHOME', 'unset')))
    print('PYTHONPATH=' + str(os.environ.get('PYTHONPATH', 'unset')))
    print('PYTHONWARNINGS=' + str(os.environ.get('PYTHONWARNINGS', 'unset')))
    print('STREAMS_INSTALL=' + str(os.environ.get('STREAMS_INSTALL', 'unset')))
    print('JAVA_HOME=' + str(os.environ.get('JAVA_HOME', 'unset')))
    return 0
Output information about streamsx and the environment . Useful for support to get key information for use of streamsx and Python in IBM Streams .
43,953
def operator_driven(drain_timeout=_DEFAULT_DRAIN, reset_timeout=_DEFAULT_RESET,
                    max_consecutive_attempts=_DEFAULT_ATTEMPTS):
    """Create an operator-driven consistent region configuration.

    The source operator itself triggers the drain and checkpoint cycles
    for the region.
    """
    return ConsistentRegionConfig(
        trigger=ConsistentRegionConfig.Trigger.OPERATOR_DRIVEN,
        drain_timeout=drain_timeout,
        reset_timeout=reset_timeout,
        max_consecutive_attempts=max_consecutive_attempts,
    )
Define an operator - driven consistent region configuration . The source operator triggers drain and checkpoint cycles for the region .
43,954
def _get_timestamp_tuple(ts):
    """Internal: coerce a value to a timestamp tuple.

    A datetime is converted through Timestamp; a Timestamp is returned
    as-is. Raises TypeError for anything else.
    """
    if isinstance(ts, datetime.datetime):
        return Timestamp.from_datetime(ts).tuple()
    if isinstance(ts, Timestamp):
        return ts
    raise TypeError('Timestamp or datetime.datetime required')
Internal method to get a timestamp tuple from a value . Handles input being a datetime or a Timestamp .
43,955
def add_toolkit(topology, location):
    """Add the SPL toolkit at location to topology."""
    import streamsx.topology.topology
    assert isinstance(topology, streamsx.topology.topology.Topology)
    # Toolkits are recorded by their absolute root path.
    topology.graph._spl_toolkits.append({'root': os.path.abspath(location)})
Add an SPL toolkit to a topology .
43,956
def add_toolkit_dependency(topology, name, version):
    """Add a version dependency on the named SPL toolkit to topology."""
    import streamsx.topology.topology
    assert isinstance(topology, streamsx.topology.topology.Topology)
    topology.graph._spl_toolkits.append({'name': name, 'version': version})
Add a version dependency on an SPL toolkit to a topology .
43,957
def submit(args=None):
    """Perform the submission described by args and return the result dict.

    Exactly one of topology / main composite / bundle is expected to be
    set by the argument parser.
    """
    streamsx._streams._version._mismatch_check('streamsx.topology.context')
    cmd_args = _parse_args(args)
    if cmd_args.topology is not None:
        app = _get_topology_app(cmd_args)
    elif cmd_args.main_composite is not None:
        app = _get_spl_app(cmd_args)
    elif cmd_args.bundle is not None:
        app = _get_bundle(cmd_args)
    _job_config_args(cmd_args, app)
    sr = _submit(cmd_args, app)
    # Ensure a return code is always present; default to failure.
    sr.setdefault('return_code', 1)
    print(sr)
    return sr
Performs the submit according to arguments and returns an object describing the result .
43,958
def _define_jco_args(cmd_parser):
    """Define job configuration arguments; return the groups (currently one)."""
    jo_group = cmd_parser.add_argument_group('Job options', 'Job configuration options')
    jo_group.add_argument('--job-name', help='Job name')
    jo_group.add_argument('--preload', action='store_true',
                          help='Preload job onto all resources in the instance')
    jo_group.add_argument('--trace',
                          choices=['error', 'warn', 'info', 'debug', 'trace'],
                          help='Application trace level')
    jo_group.add_argument('--submission-parameters', '-p', nargs='+',
                          action=_SubmitParamArg,
                          help="Submission parameters as name=value pairs")
    jo_group.add_argument('--job-config-overlays',
                          help="Path to file containing job configuration overlays JSON. Overrides any job configuration set by the application.",
                          metavar='file')
    return (jo_group,)
Define job configuration arguments . Returns groups defined currently one .
43,959
def _submit_topology(cmd_args, app):
    """Submit a Python topology (possibly wrapping an SPL main composite).

    Either builds a bundle or submits to the Streaming Analytics service,
    depending on the command arguments.
    """
    cfg = app.cfg
    if cmd_args.create_bundle:
        ctxtype = ctx.ContextTypes.BUNDLE
    elif cmd_args.service_name:
        cfg[ctx.ConfigParams.FORCE_REMOTE_BUILD] = True
        cfg[ctx.ConfigParams.SERVICE_NAME] = cmd_args.service_name
        ctxtype = ctx.ContextTypes.STREAMING_ANALYTICS_SERVICE
    return ctx.submit(ctxtype, app.app, cfg)
Submit a Python topology to the service . This includes an SPL main composite wrapped in a Python topology .
43,960
def _submit_bundle(cmd_args, app):
    """Submit an existing bundle to the Streaming Analytics service.

    Returns the submission result dict with 'return_code' set:
    0 on success, 1 on failure.
    """
    sac = streamsx.rest.StreamingAnalyticsConnection(service_name=cmd_args.service_name)
    sas = sac.get_streaming_analytics()
    sr = sas.submit_job(bundle=app.app, job_config=app.cfg[ctx.ConfigParams.JOB_CONFIG])
    # Default to failure so rc is always bound — the original code raised
    # NameError when none of the expected keys was present in the result.
    rc = 1
    if 'exception' in sr:
        rc = 1
    elif 'status_code' in sr:
        try:
            # Fixed misplaced parenthesis: compare the numeric status to 200
            # (was `int(sr['status_code'] == 200)`).
            rc = 0 if int(sr['status_code']) == 200 else 1
        except Exception:
            rc = 1
    elif 'id' in sr or 'jobId' in sr:
        rc = 0
    sr['return_code'] = rc
    return sr
Submit an existing bundle to the service
43,961
def pipe(wrapped):
    """Decorator creating a pipe SPL operator from a function.

    The operator has one input port and one output port; the function is
    called for each input tuple.
    """
    if not inspect.isfunction(wrapped):
        raise TypeError('A function is required')
    return _wrapforsplop(_OperatorType.Pipe, wrapped, 'position', False)
Decorator to create an SPL operator from a function . A pipe SPL operator with a single input port and a single output port . For each tuple on the input port the function is called passing the contents of the tuple .
43,962
def _define_fixed(wrapped, callable_):
    """Return how many fixed positional parameters the callable requires.

    Returns -1 for dictionary style, or when the signature takes *args.
    """
    is_class = inspect.isclass(wrapped)
    style = callable_._splpy_style if hasattr(callable_, '_splpy_style') else wrapped._splpy_style
    if style == 'dictionary':
        return -1
    fixed_count = 0
    if style == 'tuple':
        params = _inspect.signature(callable_).parameters
        it = iter(params)
        if is_class:
            # Skip the bound `self` parameter.
            next(it)
        for pn in it:
            kind = params[pn].kind
            if kind == _inspect.Parameter.POSITIONAL_OR_KEYWORD:
                fixed_count += 1
            if kind == _inspect.Parameter.VAR_POSITIONAL:
                fixed_count = -1
                break
            if kind == _inspect.Parameter.VAR_KEYWORD:
                break
    return fixed_count
For the callable see how many positional parameters are required
43,963
def ignore(wrapped):
    """Decorator marking a Python function as ignored by SPL extraction."""
    @functools.wraps(wrapped)
    def _ignore(*args, **kwargs):
        return wrapped(*args, **kwargs)
    _ignore._splpy_optype = _OperatorType.Ignore
    _ignore._splpy_file = inspect.getsourcefile(wrapped)
    return _ignore
Decorator to ignore a Python function .
43,964
def sink(wrapped):
    """Decorator creating a sink SPL operator (single input port, no output)."""
    if not inspect.isfunction(wrapped):
        raise TypeError('A function is required')
    return _wrapforsplop(_OperatorType.Sink, wrapped, 'position', False)
Creates an SPL operator with a single input port .
43,965
def submit(self, port_id, tuple_):
    """Submit tuple_ to the output port identified by port_id."""
    # Resolve the logical port id to its index before submitting.
    ec._submit(self, self._splpy_output_ports[port_id], tuple_)
Submit a tuple to the output port .
43,966
def _splpy_primitive_input_fns(obj):
    """Return the class-level input port functions bound to instance obj."""
    return [getattr(obj, fn.__name__) for fn in obj._splpy_input_ports]
Convert the list of class input functions to be instance functions against obj . Used by
43,967
def _splpy_all_ports_ready(callable_):
    """Call all_ports_ready on a primitive operator, if it defines one.

    Returns the callable's result, or None when the method is absent or
    the operator's exit handling swallows the error.
    """
    if hasattr(type(callable_), 'all_ports_ready'):
        try:
            return callable_.all_ports_ready()
        except:
            ei = sys.exc_info()
            if streamsx._streams._runtime._call_exit(callable_, ei):
                return None
            # Bug fix: was `raise e1[1]` — `e1` is undefined (NameError);
            # re-raise the captured exception instance from `ei`.
            raise ei[1]
    return None
Call all_ports_ready for a primitive operator .
43,968
def receive(self, ikpdb):
    """Wait for a complete message from the debugger; return it as a dict.

    Accumulates socket data until a full framed message (length= header up
    to MAGIC_CODE plus the JSON payload) is available, adjusting
    SOCKET_BUFFER_SIZE so the next recv reads just what is missing.
    NOTE(review): reconstructed from a definition split across two
    garbled source lines — verify against the original file.
    """
    while self._network_loop:
        _logger.n_debug("Enter socket.recv(%s) with self._received_data = %s",
                        self.SOCKET_BUFFER_SIZE, self._received_data)
        try:
            if self.SOCKET_BUFFER_SIZE:
                data = self._connection.recv(self.SOCKET_BUFFER_SIZE)
            else:
                data = b''
            _logger.n_debug("Socket.recv(%s) => %s", self.SOCKET_BUFFER_SIZE, data)
        except socket.timeout:
            _logger.n_debug("socket.timeout witk ikpdb.status=%s", ikpdb.status)
            if ikpdb.status == 'terminated':
                _logger.n_debug("breaking IKPdbConnectionHandler.receive() "
                                "network loop as ikpdb state is 'terminated'.")
                return {'command': '_InternalQuit', 'args': {}}
            continue
        except socket.error as socket_err:
            if ikpdb.status == 'terminated':
                return {'command': '_InternalQuit',
                        'args': {'socket_error_number': socket_err.errno,
                                 'socket_error_str': socket_err.strerror}}
            continue
        except Exception as exc:
            _logger.g_error("Unexecpected Error: '%s' in IKPdbConnectionHandler"
                            ".command_loop.", exc)
            _logger.g_error(traceback.format_exc())
            print("".join(traceback.format_stack()))
            return {'command': '_InternalQuit',
                    'args': {"error": exc.__class__.__name__,
                             "message": exc.message}}

        self._received_data += data.decode('utf-8')

        # Need both the magic code and the length marker before framing.
        try:
            magic_code_idx = self._received_data.index(self.MAGIC_CODE)
        except ValueError:
            continue
        try:
            length_idx = self._received_data.index(u'length=')
        except ValueError:
            continue

        json_length = int(self._received_data[length_idx + 7:magic_code_idx])
        message_length = magic_code_idx + len(self.MAGIC_CODE) + json_length
        if message_length <= len(self._received_data):
            # Full message available: slice it off the buffer.
            full_message = self._received_data[:message_length]
            self._received_data = self._received_data[message_length:]
            if len(self._received_data) > 0:
                # Leftover bytes: next pass parses them without recv().
                self.SOCKET_BUFFER_SIZE = 0
            else:
                self.SOCKET_BUFFER_SIZE = 4096
            break
        else:
            # Read exactly the remaining bytes of this message next time.
            self.SOCKET_BUFFER_SIZE = message_length - len(self._received_data)

    self.log_received(full_message)
    obj = self.decode(full_message)
    return obj
Waits for a message from the debugger and returns it as a dict .
43,969
def clear(self):
    """Remove this breakpoint from all class-level registries."""
    del IKBreakpoint.breakpoints_by_file_and_line[self.file_name, self.line_number]
    IKBreakpoint.breakpoints_by_number[self.number] = None
    IKBreakpoint.breakpoints_files[self.file_name].remove(self.line_number)
    # Drop the per-file entry once its last breakpoint is gone.
    if not IKBreakpoint.breakpoints_files[self.file_name]:
        del IKBreakpoint.breakpoints_files[self.file_name]
    IKBreakpoint.update_active_breakpoint_flag()
Clear a breakpoint by removing it from all lists .
43,970
def update_active_breakpoint_flag(cls):
    """Recompute any_active_breakpoint: True when any breakpoint is enabled."""
    cls.any_active_breakpoint = any(
        bp.enabled for bp in cls.breakpoints_by_number if bp)
Checks all breakpoints to find whether at least one is active and updates any_active_breakpoint accordingly .
43,971
def disable_all_breakpoints(cls):
    """Disable every registered breakpoint and refresh the active flag."""
    for bp in cls.breakpoints_by_number:
        if bp:
            bp.enabled = False
    cls.update_active_breakpoint_flag()
Disable all breakpoints and update active_breakpoint_flag .
43,972
def backup_breakpoints_state(cls):
    """Return a restorable snapshot as (number, enabled, condition) tuples."""
    return [(bp.number, bp.enabled, bp.condition)
            for bp in cls.breakpoints_by_number if bp]
Returns the state of all breakpoints in a list that can be used later to restore all breakpoints state
43,973
def canonic(self, file_name):
    """Return the canonical file name: absolute, case-normalized, cached.

    Pseudo-filenames of the form "<...>" (eg. "<string>") are returned
    unchanged.
    """
    if file_name == "<" + file_name[1:-1] + ">":
        return file_name
    cached = self.file_name_cache.get(file_name)
    if cached:
        return cached
    canonical = os.path.normcase(os.path.abspath(file_name))
    self.file_name_cache[file_name] = canonical
    return canonical
returns canonical version of a file name . A canonical file name is an absolute lowercase normalized path to a given file .
43,974
def object_properties_count(self, o):
    """Return the number of user-browsable properties of o.

    Containers report their length; scalars, modules and callables
    report 0; other objects report the count of non-dunder, non-callable
    entries in their __dict__.
    """
    if isinstance(o, (dict, list, tuple, set)):
        return len(o)
    if isinstance(o, (type(None), bool, float, str, int, bytes,
                      types.ModuleType, types.MethodType, types.FunctionType)):
        return 0
    # Removed unused local `o_type` from the original.
    try:
        if hasattr(o, '__dict__'):
            return len([m_name for m_name, m_value in o.__dict__.items()
                        if not m_name.startswith('__')
                        and type(m_value) not in (types.ModuleType,
                                                  types.MethodType,
                                                  types.FunctionType)])
        return 0
    except Exception:
        # Attribute access on exotic objects can raise; was a bare except.
        return 0
returns the number of user browsable properties of an object .
43,975
def evaluate(self, frame_id, expression, global_context=False, disable_break=False):
    """Evaluate expression in the frame identified by frame_id, or globally.

    Breakpoints are disabled while evaluating when disable_break is True.
    Returns (result_value, result_type) as strings; result_value may be
    CGI-escaped depending on CGI_ESCAPE_EVALUATE_OUTPUT.
    """
    if disable_break:
        breakpoints_backup = IKBreakpoint.backup_breakpoints_state()
        IKBreakpoint.disable_all_breakpoints()

    if frame_id and not global_context:
        # Recover the frame object from its id.
        eval_frame = ctypes.cast(frame_id, ctypes.py_object).value
        global_vars = eval_frame.f_globals
        local_vars = eval_frame.f_locals
    else:
        global_vars = None
        local_vars = None

    try:
        result = eval(expression, global_vars, local_vars)
        result_type = IKPdbRepr(result)
        result_value = repr(result)
    except SyntaxError:
        # Statements (eg. assignments) are invalid for eval(); retry as exec().
        try:
            result = exec(expression, global_vars, local_vars)
            result_type = IKPdbRepr(result)
            result_value = repr(result)
        except Exception as e:
            t, result = sys.exc_info()[:2]
            if isinstance(t, str):
                result_type = t
            else:
                result_type = str(t.__name__)
            result_value = "%s: %s" % (result_type, result,)
    except:
        t, result = sys.exc_info()[:2]
        if isinstance(t, str):
            result_type = t
        else:
            result_type = t.__name__
        result_value = "%s: %s" % (result_type, result,)

    if disable_break:
        IKBreakpoint.restore_breakpoints_state(breakpoints_backup)

    _logger.e_debug("evaluate(%s) => result_value=%s, result_type=%s, result=%s",
                    expression, result_value, result_type, result)

    if self.CGI_ESCAPE_EVALUATE_OUTPUT:
        result_value = cgi.escape(result_value)

    # The value must be JSON-serializable to be sent to the client.
    try:
        json.dumps(result_value)
    except:
        t, result = sys.exc_info()[:2]
        if isinstance(t, str):
            result_type = t
        else:
            result_type = t.__name__
        result_value = ("<plaintext>%s: IKP3db is unable to JSON encode result to send it to "
                        "debugging client.\n"
                        " This typically occurs if you try to print a string that cannot be"
                        " decoded to 'UTF-8'.\n"
                        " You should be able to evaluate result and inspect it's content"
                        " by removing the print statement." % result_type)
    return result_value, result_type
Evaluates expression in the context of the frame identified by frame_id or globally . Breakpoints are disabled depending on disable_break value . Returns a tuple of value and type both as str . Note that - depending on the CGI_ESCAPE_EVALUATE_OUTPUT attribute - value is escaped .
43,976
def let_variable(self, frame_id, var_name, expression_value):
    """Assign expression_value to var_name in frame frame_id.

    Builds and exec()s a "name=value" expression with breakpoints
    disabled. Returns an error message, or "" on success.
    """
    breakpoints_backup = IKBreakpoint.backup_breakpoints_state()
    IKBreakpoint.disable_all_breakpoints()

    let_expression = "%s=%s" % (var_name, expression_value,)
    eval_frame = ctypes.cast(frame_id, ctypes.py_object).value
    global_vars = eval_frame.f_globals
    local_vars = eval_frame.f_locals
    try:
        exec(let_expression, global_vars, local_vars)
        error_message = ""
    except Exception as e:
        t, result = sys.exc_info()[:2]
        if isinstance(t, str):
            result_type = t
        else:
            result_type = str(t.__name__)
        error_message = "%s: %s" % (result_type, result,)

    IKBreakpoint.restore_breakpoints_state(breakpoints_backup)
    _logger.e_debug("let_variable(%s) => %s", let_expression,
                    error_message or 'succeed')
    return error_message
Let a frame s var with a value by building then eval a let expression with breakoints disabled .
43,977
def setup_step_into(self, frame, pure=False):
    """Arm the tracer for a stepInto from frame.

    With pure=True only called frames stop; otherwise the current frame
    also stops (stepOver fallback when the line makes no call).
    """
    self.frame_calling = frame
    self.frame_stop = None if pure else frame
    self.frame_return = None
    self.frame_suspend = False
    self.pending_stop = True
Setup debugger for a stepInto
43,978
def setup_step_out(self, frame):
    """Arm the tracer to stop when frame returns to its caller."""
    self.frame_calling = None
    self.frame_stop = None
    self.frame_return = frame.f_back
    self.frame_suspend = False
    self.pending_stop = True
Setup debugger for a stepOut
43,979
def setup_suspend(self):
    """Arm the tracer to suspend execution at the next opportunity."""
    self.frame_calling = None
    self.frame_stop = None
    self.frame_return = None
    self.frame_suspend = True
    self.pending_stop = True
    # Tracing must be active for the suspend flag to be honoured.
    self.enable_tracing()
Setup debugger to suspend execution
43,980
def setup_resume(self):
    """Clear all stop conditions; drop tracing if no breakpoint is active."""
    self.frame_calling = None
    self.frame_stop = None
    self.frame_return = None
    self.frame_suspend = False
    self.pending_stop = False
    if not IKBreakpoint.any_active_breakpoint:
        self.disable_tracing()
Setup debugger to resume execution
43,981
def should_stop_here(self, frame):
    """Return True when the debugger must stop at this frame.

    stepInto is tested first so that a stepInto on a line that makes no
    call still behaves like a stepOver.
    """
    if self.frame_calling and self.frame_calling == frame.f_back:
        return True  # stepInto: we just entered a called frame
    if frame == self.frame_stop:
        return True  # stepOver
    if frame == self.frame_return:
        return True  # stepOut
    return bool(self.frame_suspend)
Called by the dispatch function to check whether the debugger must stop at this frame . Note that we test step into first to give a chance to stepOver in case the user clicks on stepInto on a line with no call .
43,982
def should_break_here(self, frame):
    """Return True if an effective breakpoint exists at frame's location."""
    c_file_name = self.canonic(frame.f_code.co_filename)
    if c_file_name not in IKBreakpoint.breakpoints_files:
        return False
    bp = IKBreakpoint.lookup_effective_breakpoint(
        c_file_name, frame.f_lineno, frame)
    return bool(bp)
Check whether there is a breakpoint at this frame .
43,983
def get_threads(self):
    """Return all threads keyed by ident, flagging debugger and debugged.

    Callers can use this to choose a thread to swap debugging to.
    """
    return {
        thr.ident: {
            "ident": thr.ident,
            "name": thr.name,
            "is_debugger": thr.ident == self.debugger_thread_ident,
            "is_debugged": thr.ident == self.debugged_thread_ident,
        }
        for thr in threading.enumerate()
    }
Returns a dict of all threads and indicates thread being debugged . key is thread ident and values thread info . Information from this list can be used to swap thread being debugged .
43,984
def set_debugged_thread(self, target_thread_ident=None):
    """Set (or reset, with None) the thread being debugged.

    Returns {"result": threads-dict-or-None, "error": message}.
    """
    if target_thread_ident is None:
        # Reset: no specific thread is being debugged.
        self.debugged_thread_ident = None
        self.debugged_thread_name = ''
        return {"result": self.get_threads(), "error": ""}

    thread_list = self.get_threads()
    if target_thread_ident not in thread_list:
        return {"result": None,
                "error": "No thread with ident:%s." % target_thread_ident}
    if thread_list[target_thread_ident]['is_debugger']:
        return {"result": None,
                "error": "Cannot debug IKPdb tracer (sadly...)."}

    self.debugged_thread_ident = target_thread_ident
    self.debugged_thread_name = thread_list[target_thread_ident]['name']
    return {"result": self.get_threads(), "error": ""}
Allows to reset or set the thread to debug .
43,985
def dump_tracing_state(self, context):
    """Debug tool: log the tracer's state (optionally every thread's frames)."""
    _logger.x_debug("Dumping all threads Tracing state: (%s)" % context)
    _logger.x_debug(" self.tracing_enabled=%s" % self.tracing_enabled)
    _logger.x_debug(" self.execution_started=%s" % self.execution_started)
    _logger.x_debug(" self.status=%s" % self.status)
    _logger.x_debug(" self.frame_beginning=%s" % self.frame_beginning)
    _logger.x_debug(" self.debugger_thread_ident=%s" % self.debugger_thread_ident)
    # Per-frame dump intentionally disabled; flip to True when needed.
    if False:
        for thr in threading.enumerate():
            is_current_thread = thr.ident == threading.current_thread().ident
            _logger.x_debug(" Thread: %s, %s %s" % (
                thr.name, thr.ident,
                "<= Current*" if is_current_thread else ''))
            a_frame = sys._current_frames()[thr.ident]
            while a_frame:
                flags = []
                if a_frame == self.frame_beginning:
                    flags.append("beginning")
                if a_frame == inspect.currentframe():
                    flags.append("current")
                if flags:
                    flags_str = "**" + ",".join(flags)
                else:
                    flags_str = ""
                _logger.x_debug(" => %s, %s:%s(%s) | %s %s" % (
                    a_frame,
                    a_frame.f_code.co_filename,
                    a_frame.f_lineno,
                    a_frame.f_code.co_name,
                    a_frame.f_trace,
                    flags_str))
                a_frame = a_frame.f_back
A debug tool to dump all threads tracing state
43,986
def disable_tracing(self):
    """Disable tracing if it is enabled and execution has started.

    Returns the (possibly updated) tracing_enabled state.
    """
    _logger.x_debug("disable_tracing()")
    if self.tracing_enabled and self.execution_started:
        threading.settrace(None)
        iksettrace3._set_trace_off()
        self.tracing_enabled = False
    return self.tracing_enabled
Disable tracing if it is enabled and the debugged program is running ; else do nothing .
43,987
def _get_elements(self, url, key, eclass, id=None, name=None):
    """Get resource elements under url matching id or name.

    At most one of id / name may be given; each match is wrapped in
    eclass.
    """
    if id is not None and name is not None:
        # Fixed grammar of the error message (was "cannot specified").
        raise ValueError("id and name cannot be specified together")
    json_elements = self.rest_client.make_request(url)[key]
    return [eclass(element, self.rest_client)
            for element in json_elements
            if _exact_resource(element, id) and _matching_resource(element, name)]
Get elements matching id or name
43,988
def get_domain(self):
    """Return the owning Streams Domain, or None when no domain is set."""
    if hasattr(self, 'domain'):
        return Domain(self.rest_client.make_request(self.domain),
                      self.rest_client)
Get the Streams domain for the instance that owns this view .
43,989
def get_instance(self):
    """Return the Streams Instance that owns this view."""
    return Instance(self.rest_client.make_request(self.instance),
                    self.rest_client)
Get the Streams instance that owns this view .
43,990
def get_job(self):
    """Return the Streams Job that owns this view."""
    return Job(self.rest_client.make_request(self.job), self.rest_client)
Get the Streams job that owns this view .
43,991
def stop_data_fetch(self):
    """Stop the background view-data fetch thread, if one is running."""
    fetcher = self._data_fetcher
    if fetcher:
        fetcher.stop.set()
        self._data_fetcher = None
Stops the thread that fetches data from the Streams view server .
43,992
def start_data_fetch(self):
    """Start (restarting if needed) the view-data fetch thread.

    Returns the queue the fetcher delivers tuples to.
    """
    # Ensure any previous fetcher is stopped first.
    self.stop_data_fetch()
    self._data_fetcher = _ViewDataFetcher(self, self._tuple_fn)
    threading.Thread(target=self._data_fetcher).start()
    return self._data_fetcher.items
Starts a thread that fetches data from the Streams view server .
43,993
def fetch_tuples(self, max_tuples=20, timeout=None):
    """Fetch up to max_tuples tuples from this view.

    With no timeout, blocks until max_tuples have arrived or fetching
    stops; with a timeout, returns whatever arrived within it.
    """
    tuples = list()
    if timeout is None:
        # Block indefinitely per tuple.
        while len(tuples) < max_tuples:
            fetcher = self._data_fetcher
            if not fetcher:
                break
            tuples.append(fetcher.items.get())
        return tuples

    timeout = float(timeout)
    deadline = time.time() + timeout
    while len(tuples) < max_tuples:
        remaining = deadline - time.time()
        if remaining <= 0:
            break
        try:
            fetcher = self._data_fetcher
            if not fetcher:
                break
            tuples.append(fetcher.items.get(timeout=remaining))
        except queue.Empty:
            break
    return tuples
Fetch a number of tuples from this view .
43,994
def retrieve_log_trace(self, filename=None, dir=None):
    """Save the job's application log/trace archive; return its path.

    Returns None when the job exposes no applicationLogTrace URL.
    """
    if not (hasattr(self, "applicationLogTrace")
            and self.applicationLogTrace is not None):
        return None
    logger.debug("Retrieving application logs from: " + self.applicationLogTrace)
    if not filename:
        filename = _file_name('job', self.id, '.tar.gz')
    return self.rest_client._retrieve_file(
        self.applicationLogTrace, filename, dir, 'application/x-compressed')
Retrieves the application log and trace files of the job and saves them as a compressed tar file .
43,995
def cancel(self, force=False):
    """Cancel this job via the connection's delegator; return its result."""
    return self.rest_client._sc._delegator._cancel_job(self, force)
Cancel this job .
43,996
def get_metrics(self, name=None):
    """Return this operator's metrics, optionally filtered by name."""
    return self._get_elements(self.metrics, 'metrics', Metric, name=name)
Get metrics for this operator .
43,997
def get_host(self):
    """Return the Host this operator executes on.

    Returns None when running on an externally managed resource.
    """
    if hasattr(self, 'host') and self.host:
        return Host(self.rest_client.make_request(self.host), self.rest_client)
Get resource this operator is currently executing in . If the operator is running on an externally managed resource None is returned .
43,998
def get_pe(self):
    """Return the processing element (PE) this operator executes in."""
    return PE(self.rest_client.make_request(self.pe), self.rest_client)
Get the Streams processing element this operator is executing in .
43,999
def retrieve_trace(self, filename=None, dir=None):
    """Save this PE's application trace as a plain text file; return its path.

    Returns None when the PE exposes no applicationTrace URL.
    """
    if not (hasattr(self, "applicationTrace")
            and self.applicationTrace is not None):
        return None
    logger.debug("Retrieving PE trace: " + self.applicationTrace)
    if not filename:
        filename = _file_name('pe', self.id, '.trace')
    return self.rest_client._retrieve_file(
        self.applicationTrace, filename, dir, 'text/plain')
Retrieves the application trace files for this PE and saves them as a plain text file .