idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
42,100
def plot(self, fname=None):
    """Plot the input data and the resulting smooth.

    Saves to *fname* when given, otherwise shows interactively.
    """
    plt.figure()
    pairs = sorted(zip(self.x, self.smooth_result))
    xs, ys = zip(*pairs)
    plt.plot(xs, ys, '-')
    plt.plot(self.x, self.y, '.')
    if fname:
        plt.savefig(fname)
    else:
        plt.show()
    plt.close()
Plot the input data and resulting smooth .
42,101
def compute(self):
    """Perform the smoothing operations."""
    self._compute_window_size()
    smooth_vals = []
    residuals = []
    xs, ys = self.x, self.y
    self._update_values_in_window()
    self._update_mean_in_window()
    self._update_variance_in_window()
    for i, (xi, yi) in enumerate(zip(xs, ys)):
        # Slide the window once it fits fully inside the data range.
        past_start = (i - self._neighbors_on_each_side) > 0.0
        before_end = (i + self._neighbors_on_each_side) < len(xs)
        if past_start and before_end:
            self._advance_window()
        smooth_here = self._compute_smooth_during_construction(xi)
        residual_here = self._compute_cross_validated_residual_here(xi, yi, smooth_here)
        smooth_vals.append(smooth_here)
        residuals.append(residual_here)
    self._store_unsorted_results(smooth_vals, residuals)
Perform the smoothing operations .
42,102
def _update_values_in_window(self):
    """Update which x/y values fall inside the current window."""
    lower = self._window_bound_lower
    upper = lower + self.window_size
    self._x_in_window = self.x[lower:upper]
    self._y_in_window = self.y[lower:upper]
Update which values are in the current window .
42,103
def _update_mean_in_window(self):
    """Compute the window means the slow way; useful for the first step."""
    self._mean_x_in_window = numpy.mean(self._x_in_window)
    self._mean_y_in_window = numpy.mean(self._y_in_window)
Compute the mean in the window the slow way; useful for the first step.
42,104
def _advance_window(self):
    """Shift the window one point and update means/variances incrementally."""
    # Remember the observation that is about to leave the window.
    x_out, y_out = self._x_in_window[0], self._y_in_window[0]
    self._window_bound_lower += 1
    self._update_values_in_window()
    # The newly entered observation is now at the window's right edge.
    x_in, y_in = self._x_in_window[-1], self._y_in_window[-1]
    self._remove_observation(x_out, y_out)
    self._add_observation(x_in, y_in)
Update values in current window and the current window means and variances .
42,105
def _add_observation_to_means(self, xj, yj):
    """Incrementally update the window means for one added observation."""
    new_count = self.window_size + 1.0
    self._mean_x_in_window = (self.window_size * self._mean_x_in_window + xj) / new_count
    self._mean_y_in_window = (self.window_size * self._mean_y_in_window + yj) / new_count
Update the means without recalculating for the addition of one observation .
42,106
def _remove_observation_from_means(self, xj, yj):
    """Incrementally update the window means for one removed observation."""
    new_count = self.window_size - 1.0
    self._mean_x_in_window = (self.window_size * self._mean_x_in_window - xj) / new_count
    self._mean_y_in_window = (self.window_size * self._mean_y_in_window - yj) / new_count
Update the means without recalculating for the deletion of one observation .
42,107
def _add_observation_to_variances(self, xj, yj):
    """Quickly update the variance and covariance for one added observation."""
    dx = xj - self._mean_x_in_window
    scale = (self.window_size + 1.0) / self.window_size
    term1 = scale * dx
    self._covariance_in_window += term1 * (yj - self._mean_y_in_window)
    self._variance_in_window += term1 * dx
Quickly update the variance and covariance for the addition of one observation.
42,108
def _compute_smooth_during_construction(self, xi):
    """Evaluate the value of the smooth at x-value *xi*.

    Falls back to 0.0 when the window variance is zero (degenerate fit).
    """
    if not self._variance_in_window:
        return 0.0
    slope = self._covariance_in_window / self._variance_in_window
    intercept = self._mean_y_in_window - slope * self._mean_x_in_window
    return slope * xi + intercept
Evaluate value of smooth at x - value xi .
42,109
def _compute_cross_validated_residual_here(self, xi, yi, smooth_here):
    """Compute the absolute cross-validated residual at one observation."""
    denom = (1.0 - 1.0 / self.window_size
             - (xi - self._mean_x_in_window) ** 2 / self._variance_in_window)
    # Guard against a degenerate (zero) denominator.
    if denom == 0.0:
        return 1.0
    return abs((yi - smooth_here) / denom)
Compute cross validated residual .
42,110
def build_sample_ace_problem_breiman85(N=200):
    """Build the sample problem from Breiman 1985.

    Returns a one-entry list of x-value arrays and the matching y values.
    """
    x_cubed = numpy.random.standard_normal(N)
    # numpy.cbrt replaces scipy.special.cbrt, which was deprecated and later
    # removed from SciPy; the computed values are identical.
    x = numpy.cbrt(x_cubed)
    noise = numpy.random.standard_normal(N)
    y = numpy.exp(x ** 3.0 + noise)
    return [x], y
Sample problem from Breiman 1985 .
42,111
def run_breiman85():
    """Run the Breiman 85 sample problem."""
    x, y = build_sample_ace_problem_breiman85(200)
    solver = ace.ACESolver()
    solver.specify_data_set(x, y)
    solver.solve()
    try:
        ace.plot_transforms(solver, 'sample_ace_breiman85.png')
    except ImportError:
        # Plotting is optional; skip silently when matplotlib is unavailable.
        pass
    return solver
Run Breiman 85 sample .
42,112
def run_breiman2():
    """Run Breiman's other sample problem."""
    x, y = build_sample_ace_problem_breiman2(500)
    solver = ace.ACESolver()
    solver.specify_data_set(x, y)
    solver.solve()
    try:
        plt = ace.plot_transforms(solver, None)
    except ImportError:
        # NOTE(review): if this ever triggers, `plt` is unbound below — confirm intent.
        pass
    plt.subplot(1, 2, 1)
    phi = numpy.sin(2.0 * numpy.pi * x[0])
    plt.plot(x[0], phi, label='analytic')
    plt.legend()
    plt.subplot(1, 2, 2)
    y = numpy.exp(phi)
    plt.plot(y, phi, label='analytic')
    plt.legend(loc='lower right')
    plt.savefig('no_noise_linear_x.png')
    return solver
Run Breiman's other sample problem.
42,113
def publish(self, topic, messages, key=None):
    """Take *messages* and put them on the supplied kafka *topic*.

    Retries once after refreshing topic metadata; re-raises on the
    second failure.
    """
    if not isinstance(messages, list):
        messages = [messages]
    first = True
    success = False
    if key is None:
        key = int(time.time() * 1000)
    messages = [encodeutils.to_utf8(m) for m in messages]
    key = bytes(str(key), 'utf-8') if PY3 else str(key)
    while not success:
        try:
            self._producer.send_messages(topic, key, *messages)
            success = True
        except Exception:
            if first:
                # log.warning: Logger.warn is a deprecated alias of warning.
                log.warning("Failed send on topic {}, clear metadata and retry".format(topic))
                self._kafka.reset_topic_metadata(topic)
                first = False
                continue
            log.exception('Error publishing to {} topic.'.format(topic))
            raise
Takes messages and puts them on the supplied kafka topic
42,114
def create_gzip_message(payloads, key=None, compresslevel=None):
    """Construct a gzipped Message containing multiple messages."""
    inner_messages = [create_message(payload, pl_key)
                      for payload, pl_key in payloads]
    message_set = KafkaProtocol._encode_message_set(inner_messages)
    gzipped = gzip_encode(message_set, compresslevel=compresslevel)
    codec = ATTRIBUTE_CODEC_MASK & CODEC_GZIP
    return Message(0, 0x00 | codec, key, gzipped)
Construct a Gzipped Message containing multiple Messages
42,115
def _decode_message_set_iter(cls, data):
    """Iteratively decode a MessageSet from *data*, yielding OffsetAndMessage."""
    cur = 0
    read_message = False
    while cur < len(data):
        try:
            ((offset,), cur) = relative_unpack('>q', data, cur)
            (msg, cur) = read_int_string(data, cur)
            for (offset, message) in KafkaProtocol._decode_message(msg, offset):
                read_message = True
                yield OffsetAndMessage(offset, message)
        except BufferUnderflowError:
            if read_message is False:
                # Not even one full message fit in the fetched data.
                raise ConsumerFetchSizeTooSmall()
            else:
                # PEP 479: raising StopIteration inside a generator becomes a
                # RuntimeError on Python 3.7+; a plain return ends iteration.
                return
Iteratively decode a MessageSet
42,116
def _decode_message(cls, data, offset):
    """Decode a single Message, recursing into compressed payloads."""
    ((crc, magic, att), cur) = relative_unpack('>IBB', data, 0)
    if crc != crc32(data[4:]):
        raise ChecksumError("Message checksum failed")
    (key, cur) = read_int_string(data, cur)
    (value, cur) = read_int_string(data, cur)
    codec = att & ATTRIBUTE_CODEC_MASK
    if codec == CODEC_NONE:
        yield (offset, Message(magic, att, key, value))
    elif codec == CODEC_GZIP:
        decompressed = gzip_decode(value)
        for (offset, msg) in KafkaProtocol._decode_message_set_iter(decompressed):
            yield (offset, msg)
    elif codec == CODEC_SNAPPY:
        decompressed = snappy_decode(value)
        for (offset, msg) in KafkaProtocol._decode_message_set_iter(decompressed):
            yield (offset, msg)
Decode a single Message
42,117
def decode_offset_response(cls, data):
    """Decode bytes to OffsetResponse structs, one per partition."""
    ((correlation_id, num_topics), cur) = relative_unpack('>ii', data, 0)
    for _ in range(num_topics):
        (topic, cur) = read_short_string(data, cur)
        ((num_partitions,), cur) = relative_unpack('>i', data, cur)
        for _ in range(num_partitions):
            ((partition, error, num_offsets,), cur) = relative_unpack('>ihi', data, cur)
            offsets = []
            for _k in range(num_offsets):
                ((offset,), cur) = relative_unpack('>q', data, cur)
                offsets.append(offset)
            yield OffsetResponse(topic, partition, error, tuple(offsets))
Decode bytes to an OffsetResponse
42,118
def encode_offset_commit_request(cls, client_id, correlation_id, group, payloads):
    """Encode some OffsetCommitRequest structs."""
    grouped_payloads = group_by_topic_and_partition(payloads)
    parts = [
        cls._encode_message_header(client_id, correlation_id,
                                   KafkaProtocol.OFFSET_COMMIT_KEY),
        write_short_string(group),
        struct.pack('>i', len(grouped_payloads)),
    ]
    for topic, topic_payloads in grouped_payloads.items():
        parts.append(write_short_string(topic))
        parts.append(struct.pack('>i', len(topic_payloads)))
        for partition, payload in topic_payloads.items():
            parts.append(struct.pack('>iq', partition, payload.offset))
            parts.append(write_short_string(payload.metadata))
    msg = b''.join(parts)
    # Length-prefix the assembled request.
    return struct.pack('>i%ds' % len(msg), len(msg), msg)
Encode some OffsetCommitRequest structs
42,119
def encode_offset_fetch_request(cls, client_id, correlation_id, group, payloads,
                                from_kafka=False):
    """Encode some OffsetFetchRequest structs.

    Version 0 requests Zookeeper offsets; version 1 (from_kafka=True)
    requests Kafka offsets.
    """
    grouped_payloads = group_by_topic_and_partition(payloads)
    reqver = 1 if from_kafka else 0
    parts = [
        cls._encode_message_header(client_id, correlation_id,
                                   KafkaProtocol.OFFSET_FETCH_KEY,
                                   version=reqver),
        write_short_string(group),
        struct.pack('>i', len(grouped_payloads)),
    ]
    for topic, topic_payloads in grouped_payloads.items():
        parts.append(write_short_string(topic))
        parts.append(struct.pack('>i', len(topic_payloads)))
        for partition, payload in topic_payloads.items():
            parts.append(struct.pack('>i', partition))
    msg = b''.join(parts)
    return struct.pack('>i%ds' % len(msg), len(msg), msg)
Encode some OffsetFetchRequest structs . The request is encoded using version 0 if from_kafka is false indicating a request for Zookeeper offsets . It is encoded using version 1 otherwise indicating a request for Kafka offsets .
42,120
def _get_module(target):
    """Parse *target* ('path|module:Class.function') into module, class, function.

    Side effect: appends the path component to sys.path when present.
    """
    filepath, sep, namespace = target.rpartition('|')
    if sep and not filepath:
        raise BadDirectory("Path to file not supplied.")
    module, sep, class_or_function = namespace.rpartition(':')
    if (sep and not module) or (filepath and not module):
        raise MissingModule("Need a module path for %s (%s)" % (namespace, target))
    if filepath and filepath not in sys.path:
        if not os.path.isdir(filepath):
            raise BadDirectory("No such directory: '%s'" % filepath)
        sys.path.append(filepath)
    if not class_or_function:
        raise MissingMethodOrFunction(
            "No Method or Function specified in '%s'" % target)
    if module:
        try:
            __import__(module)
        except ImportError as e:
            raise ImportFailed("Failed to import '%s'. Error: %s" % (module, e))
    klass, sep, function = class_or_function.rpartition('.')
    return module, klass, function
Import a named class module method or function .
42,121
def load(target, source_module=None):
    """Get the actual implementation (function, method, or class) of *target*."""
    module, klass, function = _get_module(target)
    if not module and source_module:
        module = source_module
    if not module:
        raise MissingModule("No module name supplied or source_module provided.")
    actual_module = sys.modules[module]
    if not klass:
        return getattr(actual_module, function)
    class_object = getattr(actual_module, klass)
    if function:
        return getattr(class_object, function)
    return class_object
Get the actual implementation of the target .
42,122
def process_child(node):
    """Strip intermediate module names from class references in the doctree."""
    if isinstance(node, sphinx.addnodes.desc_addname):
        if len(node.children) == 1:
            text = node.children[0].astext()
            if text.startswith("wpilib.") and text.endswith("."):
                text = ".".join(text.split(".")[:-2]) + "."
                node.children[0] = docutils.nodes.Text(text)
    elif isinstance(node, docutils.nodes.literal):
        text = node.children[0].astext()
        if text.startswith("wpilib."):
            stext = text.split(".")
            text = ".".join(stext[:-2] + [stext[-1]])
            node.children[0] = docutils.nodes.Text(text)
    # Recurse into all children.
    for child in node.children:
        process_child(child)
This function changes class references to not have the intermediate module name by hacking at the doctree
42,123
def read_column_data_from_txt(fname):
    """Read data from a simple whitespace-delimited text file.

    The first column is taken as the y values; the remaining columns are
    the x values. Returns (x_values, y_values) as tuples of columns.
    """
    # `with` guarantees the file handle is closed (the original leaked it).
    with open(fname) as datafile:
        datarows = [[float(token) for token in line.split()] for line in datafile]
    datacols = list(zip(*datarows))
    x_values = datacols[1:]
    y_values = datacols[0]
    return x_values, y_values
Read data from a simple text file .
42,124
def build_model_from_txt(self, fname):
    """Construct the model and run regressions from data in a text file."""
    x_values, y_values = read_column_data_from_txt(fname)
    self.build_model_from_xy(x_values, y_values)
Construct the model and perform regressions based on data in a txt file .
42,125
def build_model_from_xy(self, x_values, y_values):
    """Construct the model and run regressions from x, y data."""
    self.init_ace(x_values, y_values)
    self.run_ace()
    self.build_interpolators()
Construct the model and perform regressions based on x y data .
42,126
def build_interpolators(self):
    """Build 1-D interpolators so the transforms are continuous."""
    self.phi_continuous = [
        interp1d(xi, phii)
        for xi, phii in zip(self.ace.x, self.ace.x_transforms)
    ]
    self.inverse_theta_continuous = interp1d(self.ace.y_transform, self.ace.y)
Compute 1-D interpolation functions for all the transforms so they're continuous.
42,127
def eval(self, x_values):
    """Evaluate the ACE regression at a combination of independent values.

    Raises ValueError when the number of supplied values does not match the
    number of independent variables.
    """
    n_vars = len(self.phi_continuous)
    if len(x_values) != n_vars:
        raise ValueError('x_values must have length equal to the number of '
                         'independent variables '
                         '({0}) rather than {1}.'.format(n_vars, len(x_values)))
    sum_phi = sum(phi(xi) for phi, xi in zip(self.phi_continuous, x_values))
    return float(self.inverse_theta_continuous(sum_phi))
Evaluate the ACE regression at any combination of independent variable values .
42,128
def yesno(prompt):
    """Return True if the user answers 'y' to *prompt*."""
    prompt += " [y/n]"
    answer = ""
    # Keep asking until a definitive answer arrives.
    while answer not in ("y", "n"):
        answer = input(prompt).lower()
    return answer == "y"
Returns True if user answers y
42,129
def retry(retries=KAFKA_WAIT_RETRIES, delay=KAFKA_WAIT_INTERVAL, check_exceptions=()):
    """Retry decorator: re-invoke the wrapped callable up to *retries* times."""
    def decorator(func):
        def f_retry(*args, **kwargs):
            for attempt in range(1, retries + 1):
                try:
                    return func(*args, **kwargs)
                # NOTE(review): appending (Exception,) means every exception is
                # caught, known or not — confirm that is intentional.
                except check_exceptions + (Exception,) as exc:
                    if attempt < retries:
                        logger.info('Connection attempt %d of %d failed',
                                    attempt, retries)
                        if isinstance(exc, check_exceptions):
                            logger.debug('Caught known exception, retrying...',
                                         exc_info=True)
                        else:
                            logger.warn('Caught unknown exception, retrying...',
                                        exc_info=True)
                    else:
                        logger.exception('Failed after %d attempts', retries)
                        raise
                time.sleep(delay)
        return f_retry
    return decorator
Retry decorator .
42,130
def check_topics(client, req_topics):
    """Check that every topic in *req_topics* exists in Kafka with partitions."""
    client.update_cluster()
    logger.debug('Found topics: %r', client.topics.keys())
    for req_topic in req_topics:
        if req_topic not in client.topics.keys():
            message = 'Topic not found: {}'.format(req_topic)
            logger.warning(message)
            raise TopicNotFound(message)
        topic = client.topics[req_topic]
        if not topic.partitions:
            message = 'Topic has no partitions: {}'.format(req_topic)
            logger.warning(message)
            raise TopicNoPartition(message)
        logger.info('Topic is ready: %s', req_topic)
Check for existence of provided topics in Kafka .
42,131
def send(self, request_id, payload):
    """Send a request payload to Kafka, reconnecting first if needed."""
    log.debug("About to send %d bytes to Kafka, request %d"
              % (len(payload), request_id))
    if not self._sock:
        self.reinit()
    try:
        self._sock.sendall(payload)
    except socket.error:
        log.exception('Unable to send payload to Kafka')
        self._raise_connection_error()
Send a request to Kafka
42,132
def recv(self, request_id):
    """Get a response packet from Kafka."""
    log.debug("Reading response %d from Kafka" % request_id)
    if not self._sock:
        self.reinit()
    # The response is prefixed with its 4-byte big-endian size.
    (size,) = struct.unpack('>i', self._read_bytes(4))
    return self._read_bytes(size)
Get a response packet from Kafka
42,133
def copy(self):
    """Create an inactive copy of the connection for a background thread."""
    clone = copy.deepcopy(self)
    clone.host = copy.copy(self.host)
    clone.port = copy.copy(self.port)
    clone.timeout = copy.copy(self.timeout)
    # Sockets must never be shared across threads.
    clone._sock = None
    return clone
Create an inactive copy of the connection object suitable for passing to a background thread .
42,134
def close(self):
    """Shutdown and close the connection socket."""
    log.debug("Closing socket connection for %s:%d" % (self.host, self.port))
    if not self._sock:
        log.debug("No socket found to close!")
        return
    try:
        self._sock.shutdown(socket.SHUT_RDWR)
    except socket.error:
        # The peer may have already closed; proceeding to close() is enough.
        pass
    self._sock.close()
    self._sock = None
Shutdown and close the connection socket
42,135
def configure(self, **configs):
    """Configure the consumer instance from keyword arguments.

    Raises KafkaConfigurationError on unknown keys, missing group_id for
    auto-commit, or missing bootstrap_servers.
    """
    configs = self._deprecate_configs(**configs)
    self._config = {}
    for key in self.DEFAULT_CONFIG:
        self._config[key] = configs.pop(key, self.DEFAULT_CONFIG[key])
    if configs:
        raise KafkaConfigurationError('Unknown configuration key(s): ' +
                                      str(list(configs.keys())))
    if self._config['auto_commit_enable']:
        if not self._config['group_id']:
            raise KafkaConfigurationError(
                'KafkaConsumer configured to auto-commit '
                'without required consumer group (group_id)')
        logger.info("Configuring consumer to auto-commit offsets")
        self._reset_auto_commit()
    if not self._config['bootstrap_servers']:
        raise KafkaConfigurationError(
            'bootstrap_servers required to configure KafkaConsumer')
    self._client = KafkaClient(
        self._config['bootstrap_servers'],
        client_id=self._config['client_id'],
        timeout=(self._config['socket_timeout_ms'] / 1000.0))
Configure the consumer instance
42,136
def next(self):
    """Return the next available message, respecting the consumer timeout."""
    self._set_consumer_timeout_start()
    while True:
        try:
            return six.next(self._get_message_iterator())
        except StopIteration:
            # Iterator exhausted: rebuild it and check the timeout budget.
            self._reset_message_iterator()
            self._check_consumer_timeout()
Return the next available message
42,137
def offsets(self, group=None):
    """Get internal consumer offset values for one group or all of them."""
    if not group:
        # Snapshot of every offset group, keyed by group name.
        return {grp: self.offsets(grp)
                for grp in ('fetch', 'commit', 'task_done', 'highwater')}
    return dict(deepcopy(getattr(self._offsets, group)))
Get internal consumer offset values
42,138
def task_done(self, message):
    """Mark a fetched *message* as consumed; may trigger an auto-commit.

    Returns False when the message's topic/partition is unknown.
    """
    topic_partition = (message.topic, message.partition)
    if topic_partition not in self._topics:
        logger.warning('Unrecognized topic/partition in task_done message: '
                       '{0}:{1}'.format(*topic_partition))
        return False
    offset = message.offset
    prev_done = self._offsets.task_done[topic_partition]
    if prev_done is not None and offset != (prev_done + 1):
        logger.warning('Marking task_done on a non-continuous offset: %d != %d + 1',
                       offset, prev_done)
    prev_commit = self._offsets.commit[topic_partition]
    if prev_commit is not None and ((offset + 1) <= prev_commit):
        logger.warning('Marking task_done on a previously committed offset?: %d (+1) <= %d',
                       offset, prev_commit)
    self._offsets.task_done[topic_partition] = offset
    if self._does_auto_commit_messages():
        self._incr_auto_commit_message_count()
    if self._should_auto_commit():
        self.commit()
    return True
Mark a fetched message as consumed .
42,139
def as_json(data, **kwargs):
    """Serialize *data* as a JSON string.

    Defaults to unsorted keys and unescaped non-ASCII output unless the
    caller overrides sort_keys / ensure_ascii.
    """
    kwargs.setdefault('sort_keys', False)
    kwargs.setdefault('ensure_ascii', False)
    return json.dumps(data, **kwargs)
Writes data as json .
42,140
def read_body(payload, content_type=JSON_CONTENT_TYPE):
    """Read an HTTP payload, decoding it according to *content_type*.

    Returns None for an empty payload.
    """
    if content_type not in _READABLE_CONTENT_TYPES:
        raise exceptions.UnsupportedContentTypeException(
            'Cannot read %s, not in %s' % (content_type, _READABLE_CONTENT_TYPES))
    try:
        content = payload.read()
        if not content:
            return None
    except Exception as ex:
        raise exceptions.UnreadableContentError(str(ex))
    return _READABLE_CONTENT_TYPES[content_type](content)
Reads HTTP payload according to given content_type .
42,141
def __process_idle_events(self):
    """Drain queued (callable, args) pairs; must run on the GUI thread."""
    while True:
        try:
            # `handler` avoids shadowing the builtin `callable`.
            handler, args = self.queue.get(block=False)
        except queue.Empty:
            break
        handler(*args)
This should never be called directly; it is called via an event and should always run on the GUI thread.
42,142
def timer_fired(self):
    """Polling loop for events arriving from other threads."""
    self.__process_idle_events()
    self.update_widgets()
    # Re-arm the timer for another pass in 100 ms.
    delay = 100
    self.root.after(delay, self.timer_fired)
Polling loop for events from other threads
42,143
def snappy_encode(payload, xerial_compatible=False, xerial_blocksize=32 * 1024):
    """Encode *payload* with snappy.

    When xerial_compatible, the stream is framed per the xerial snappy
    library: a fixed header followed by size-prefixed compressed blocks of
    at most xerial_blocksize bytes.
    """
    if not has_snappy():
        raise NotImplementedError("Snappy codec is not available")
    if not xerial_compatible:
        return snappy.compress(payload)
    out = BytesIO()
    header = b''.join([struct.pack('!' + fmt, dat)
                       for fmt, dat in zip(_XERIAL_V1_FORMAT, _XERIAL_V1_HEADER)])
    out.write(header)
    # `range` replaces the Python-2-only `xrange`; the surrounding code
    # already supports Python 3.
    for i in range(0, len(payload), xerial_blocksize):
        block = snappy.compress(payload[i:i + xerial_blocksize])
        out.write(struct.pack('!i', len(block)))
        out.write(block)
    out.seek(0)
    return out.read()
Encode the given data with snappy. If xerial_compatible is set, the stream is encoded in a fashion compatible with the xerial snappy library.
42,144
def relpath(path):
    """Path helper: give a path for *path* relative to this file's directory."""
    here = os.path.abspath(os.path.dirname(__file__))
    return os.path.normpath(os.path.join(here, path))
Path helper gives you a path relative to this file
42,145
def init(policy_file=None, rules=None, default_rule=None, use_conf=True):
    """Init the global Enforcer, loading rules and warning on deprecations."""
    global _ENFORCER
    global saved_file_rules
    if not _ENFORCER:
        _ENFORCER = policy.Enforcer(CONF,
                                    policy_file=policy_file,
                                    rules=rules,
                                    default_rule=default_rule,
                                    use_conf=use_conf)
        register_rules(_ENFORCER)
        _ENFORCER.load_rules()
    # Warn once whenever the file-based rules change.
    current_file_rules = _serialize_rules(_ENFORCER.file_rules)
    if saved_file_rules != current_file_rules:
        _warning_for_deprecated_user_based_rules(current_file_rules)
        saved_file_rules = copy.deepcopy(current_file_rules)
Init an Enforcer class .
42,146
def _serialize_rules(rules):
    """Serialize each Rule object as a (name, str) pair, sorted by name."""
    return sorted(((name, str(rule)) for name, rule in rules.items()),
                  key=lambda pair: pair[0])
Serialize all the Rule object as string .
42,147
def _warning_for_deprecated_user_based_rules(rules):
    """Warn when user_id-based enforcement appears in a rule not supporting it."""
    for rule in rules:
        # Rules for user-based resources legitimately use user_id; skip them.
        if any(resource in rule[0] for resource in USER_BASED_RESOURCES):
            continue
        if 'user_id' in KEY_EXPR.findall(rule[1]):
            LOG.warning(_LW("The user_id attribute isn't supported in the "
                            "rule '%s'. All the user_id based policy "
                            "enforcement will be removed in the "
                            "future."), rule[0])
Warn when user-based policy enforcement is used in a rule that does not support it.
42,148
def authorize(context, action, target, do_raise=True):
    """Verify that *action* is valid on *target* in this *context*."""
    init()
    credentials = context.to_policy_values()
    try:
        return _ENFORCER.authorize(action, target, credentials,
                                   do_raise=do_raise, action=action)
    except policy.PolicyNotRegistered:
        LOG.exception('Policy not registered')
        raise
    except Exception:
        LOG.debug('Policy check for %(action)s failed with credentials '
                  '%(credentials)s',
                  {'action': action, 'credentials': credentials})
        raise
Verify that the action is valid on the target in this context .
42,149
def check_is_admin(context):
    """Check whether the context's roles satisfy the admin_required policy."""
    init()
    credentials = context.to_policy_values()
    # For the admin check the credentials double as the target.
    target = credentials
    return _ENFORCER.authorize('admin_required', target, credentials)
Check if roles contains admin role according to policy settings .
42,150
def set_rules(rules, overwrite=True, use_conf=False):
    """Set rules on the global Enforcer from the provided dict of rules."""
    init(use_conf=False)
    _ENFORCER.set_rules(rules, overwrite, use_conf)
Set rules based on the provided dict of rules .
42,151
def verify_deprecated_policy(old_policy, new_policy, default_rule, context):
    """Check the deprecated policy action's rule; authorize via it if customized.

    Returns False when the old rule still matches its default (nothing to do).
    """
    current_rule = str(_ENFORCER.rules[old_policy]) if _ENFORCER else None
    if current_rule == default_rule:
        return False
    LOG.warning("Start using the new action '{0}'. The existing "
                "action '{1}' is being deprecated and will be "
                "removed in future release.".format(new_policy, old_policy))
    target = {'project_id': context.project_id,
              'user_id': context.user_id}
    return authorize(context=context, action=old_policy, target=target)
Check the rule of the deprecated policy action
42,152
def build_sample_ace_problem_wang04(N=100):
    """Build the sample problem from Wang 2004."""
    x = [numpy.random.uniform(-1, 1, size=N) for _ in range(0, 5)]
    noise = numpy.random.standard_normal(N)
    y = numpy.log(4.0 + numpy.sin(4 * x[0]) + numpy.abs(x[1]) + x[2] ** 2 +
                  x[3] ** 3 + x[4] + 0.1 * noise)
    return x, y
Build sample problem from Wang 2004 .
42,153
def run_wang04():
    """Run the Wang 2004 sample problem."""
    x, y = build_sample_ace_problem_wang04(N=200)
    solver = ace.ACESolver()
    solver.specify_data_set(x, y)
    solver.solve()
    try:
        ace.plot_transforms(solver, 'ace_transforms_wang04.png')
        ace.plot_input(solver, 'ace_input_wang04.png')
    except ImportError:
        # Plotting is optional; skip when matplotlib is unavailable.
        pass
    return solver
Run sample problem .
42,154
def add_robot(self, controller):
    """Add a robot controller and subscribe to its mode changes."""
    controller.on_mode_change(self._on_robot_mode_change)
    self.robots.append(controller)
Add a robot controller
42,155
def set_joystick(self, x, y, n):
    """Forward joystick (x, y) values from the SnakeBoard to robot *n*."""
    self.robots[n].set_joystick(x, y)
Receives joystick values from the SnakeBoard x y Coordinates n Robot number to give it to
42,156
# Non-coroutine teardown: cancels the reconnect/ping background tasks,
# closes the transport, resets protocol/db state, and transitions the
# connection DISCONNECTING -> DISCONNECTED (no-op when already disconnected).
# NOTE(review): the statement grouping is ambiguous in this collapsed form —
# confirm whether each `= None` reset sits inside its `if ...done()` guard.
def close ( self ) : if self . _state == ConnectionState . DISCONNECTED : return self . _set_state ( ConnectionState . DISCONNECTING ) logger . info ( '%s Disconnecting...' , self . fingerprint ) if self . _reconnect_task and not self . _reconnect_task . done ( ) : self . _reconnect_task . cancel ( ) self . _reconnect_task = None if self . _ping_task and not self . _ping_task . done ( ) : self . _ping_task . cancel ( ) self . _ping_task = None if self . _transport : self . _transport . close ( ) self . _transport = None self . _protocol = None self . _disconnect_waiter = None self . _db = _DbMock ( ) self . _set_state ( ConnectionState . DISCONNECTED )
Same as disconnect, but not a coroutine; i.e. it does not wait for the disconnect to finish.
42,157
def eval(self, expression, args=None, *, timeout=-1.0, push_subscribe=False) -> _MethodRet:
    """Eval request coroutine: delegate to the underlying db connection."""
    return self._db.eval(expression, args,
                         timeout=timeout,
                         push_subscribe=push_subscribe)
Eval request coroutine .
42,158
def select(self, space, key=None, **kwargs) -> _MethodRet:
    """Select request coroutine: delegate to the underlying db connection."""
    return self._db.select(space, key, **kwargs)
Select request coroutine .
42,159
def insert(self, space, t, *, replace=False, timeout=-1) -> _MethodRet:
    """Insert request coroutine: delegate to the underlying db connection."""
    return self._db.insert(space, t, replace=replace, timeout=timeout)
Insert request coroutine .
42,160
def replace(self, space, t, *, timeout=-1.0) -> _MethodRet:
    """Replace request coroutine; same as insert but replaces on conflict."""
    return self._db.replace(space, t, timeout=timeout)
Replace request coroutine . Same as insert but replace .
42,161
def delete(self, space, key, **kwargs) -> _MethodRet:
    """Delete request coroutine: delegate to the underlying db connection."""
    return self._db.delete(space, key, **kwargs)
Delete request coroutine .
42,162
def update(self, space, key, operations, **kwargs) -> _MethodRet:
    """Update request coroutine: delegate to the underlying db connection."""
    return self._db.update(space, key, operations, **kwargs)
Update request coroutine .
42,163
def compute(self):
    """Run the SuperSmoother pipeline end to end."""
    self._compute_primary_smooths()
    self._smooth_the_residuals()
    self._select_best_smooth_at_each_point()
    self._enhance_bass()
    self._smooth_best_span_estimates()
    self._apply_best_spans_to_primaries()
    self._smooth_interpolated_smooth()
    # Residuals are not tracked for the combined result.
    self._store_unsorted_results(self.smooth_result,
                                 numpy.zeros(len(self.smooth_result)))
Run the SuperSmoother .
42,164
def _compute_primary_smooths(self):
    """Compute fixed-span smooths with all of the default spans."""
    for span in DEFAULT_SPANS:
        result = smoother.perform_smooth(self.x, self.y, span)
        self._primary_smooths.append(result)
Compute fixed - span smooths with all of the default spans .
42,165
def _smooth_the_residuals(self):
    """Apply the MID_SPAN smooth to each primary smooth's residuals."""
    for primary_smooth in self._primary_smooths:
        smoothed = smoother.perform_smooth(
            self.x, primary_smooth.cross_validated_residual, MID_SPAN)
        self._residual_smooths.append(smoothed.smooth_result)
Apply the MID_SPAN to the residuals of the primary smooths .
42,166
def _smooth_best_span_estimates(self):
    """Apply a MID_SPAN smooth to the per-observation best span estimates."""
    self._smoothed_best_spans = smoother.perform_smooth(
        self.x, self._best_span_at_each_point, MID_SPAN)
Apply a MID_SPAN smooth to the best span estimates at each observation .
42,167
def _apply_best_spans_to_primaries(self):
    """Interpolate between primary smooths using the smoothed best spans."""
    self.smooth_result = []
    for i, best_span in enumerate(self._smoothed_best_spans.smooth_result):
        primary_values = [s.smooth_result[i] for s in self._primary_smooths]
        best_value = numpy.interp(best_span, DEFAULT_SPANS, primary_values)
        self.smooth_result.append(best_value)
Apply best spans .
42,168
def _smooth_interpolated_smooth(self):
    """Smooth the interpolated result once more with the tweeter span."""
    final = smoother.perform_smooth(self.x, self.smooth_result, TWEETER_SPAN)
    self.smooth_result = final.smooth_result
Smooth interpolated results with tweeter span .
42,169
# Child-process worker: builds a SimpleConsumer, then loops — waiting on the
# controller's start event, fetching one message at a time, and pushing it onto
# the shared queue; pauses when `size.value` messages are queued, sleeps when
# no message is available, and backs off exponentially (capped at
# MAX_BACKOFF_SECONDS) on KafkaError before rebuilding the consumer.
# NOTE(review): the parameter `queue` shadows the queue module — confirm that
# `queue.Full` resolves correctly on the passed-in queue object.
# NOTE(review): statement grouping (e.g. where `count += 1` and
# `events.pause.wait()` attach) is ambiguous in this collapsed form.
def _mp_consume ( client , group , topic , queue , size , events , ** consumer_options ) : interval = 1 while not events . exit . is_set ( ) : try : client . reinit ( ) consumer = SimpleConsumer ( client , group , topic , auto_commit = False , auto_commit_every_n = None , auto_commit_every_t = None , ** consumer_options ) consumer . provide_partition_info ( ) while True : events . start . wait ( ) if events . exit . is_set ( ) : break count = 0 message = consumer . get_message ( ) if message : while True : try : queue . put ( message , timeout = FULL_QUEUE_WAIT_TIME_SECONDS ) break except queue . Full : if events . exit . is_set ( ) : break count += 1 if count == size . value : events . pause . wait ( ) else : time . sleep ( NO_MESSAGES_WAIT_TIME_SECONDS ) consumer . stop ( ) except KafkaError as e : log . error ( "Problem communicating with Kafka (%s), retrying in %d seconds..." % ( e , interval ) ) time . sleep ( interval ) interval = interval * 2 if interval * 2 < MAX_BACKOFF_SECONDS else MAX_BACKOFF_SECONDS
A child process worker which consumes messages based on the notifications given by the controller process
42,170
def rotate(self, angle):
    """Rotate the object about its center; *angle* is in radians."""
    self.angle = (self.angle + angle) % (math.pi * 2.0)
    cos_a = math.cos(angle)
    sin_a = math.sin(angle)
    px, py = self.center

    def _rotate_point(pt):
        # Translate to the origin, rotate, translate back.
        x, y = pt
        x -= px
        y -= py
        return (x * cos_a - y * sin_a) + px, (x * sin_a + y * cos_a) + py

    self.pts = [_rotate_point(pt) for pt in self.pts]
Rotate the object about its center; the angle is specified in radians.
42,171
def publish(self, topic, messages, key=None, timeout=2):
    """Publish messages to the topic.

    A single message may be passed directly; it is wrapped in a list.
    Returns the number of messages still waiting after ``timeout``
    seconds (the result of the producer flush).
    """
    if not isinstance(messages, list):
        messages = [messages]
    try:
        for message in messages:
            encoded = encodeutils.safe_encode(message, incoming='utf-8')
            self._producer.produce(topic, encoded, key,
                                   callback=KafkaProducer.delivery_report)
            # serve delivery callbacks without blocking
            self._producer.poll(0)
        return self._producer.flush(timeout)
    except (BufferError, confluent_kafka.KafkaException, NotImplementedError):
        log.exception(u'Error publishing to {} topic.'.format(topic))
        raise
Publish messages to the topic .
42,172
def out_name(stem, timestep=None):
    """Return the StagPy output file name for ``stem``.

    When ``timestep`` is given, it is rendered with INT_FMT and appended
    to the stem before prefixing with the configured output name.
    """
    suffix = stem if timestep is None else (stem + INT_FMT).format(timestep)
    return conf.core.outname + '_' + suffix
Return StagPy out file name .
42,173
def saveplot(fig, *name_args, close=True, **name_kwargs):
    """Save a matplotlib figure under a StagPy output name.

    Positional and keyword arguments are forwarded to out_name; the
    figure is closed afterwards unless ``close`` is False.
    """
    stem = out_name(*name_args, **name_kwargs)
    fig.savefig('{}.{}'.format(stem, conf.plot.format),
                format=conf.plot.format, bbox_inches='tight')
    if close:
        plt.close(fig)
Save matplotlib figure .
42,174
def baredoc(obj):
    """Return the first line of the docstring of an object.

    Trailing periods and surrounding whitespace are stripped; an empty
    string is returned when the object has no docstring.
    """
    doc = getdoc(obj)
    if not doc:
        return ''
    first_line = doc.splitlines()[0]
    return first_line.rstrip(' .').lstrip()
Return the first line of the docstring of an object .
42,175
def fmttime(tin):
    """Return a LaTeX expression of ``tin`` in scientific notation."""
    mantissa, exponent = '{:.2e}'.format(tin).split('e')
    return r'$t={} \times 10^{{{}}}$'.format(mantissa, int(exponent))
Return LaTeX expression with time in scientific notation .
42,176
def list_of_vars(arg_plot):
    """Construct the list of variables per plot.

    The string is split on '-' (figures), '.' (subplots) and ','
    (variables); empty entries at every level are dropped.
    """
    lovs = []
    for pvars in arg_plot.split('-'):
        lov = []
        for svars in pvars.split('.'):
            slov = [var for var in svars.split(',') if var]
            if slov:
                lov.append(slov)
        if lov:
            lovs.append(lov)
    return lovs
Construct list of variables per plot .
42,177
def set_of_vars(lovs):
    """Flatten a list of variables per plot into a set of variables."""
    return {var for pvars in lovs for svars in pvars for var in svars}
Build set of variables from list .
42,178
def get_rbounds(step):
    """Return the radial/vertical positions of the domain boundaries.

    The bottom position comes from the step geometry when available,
    otherwise from the par file; cartesian geometry forces it to 0.
    The domain thickness is taken as 1.
    """
    if step.geom is not None:
        rcmb = step.geom.rcmb
    else:
        geom_par = step.sdat.par['geometry']
        rcmb = geom_par['r_cmb']
        if geom_par['shape'].lower() == 'cartesian':
            rcmb = 0
    rcmb = max(rcmb, 0)
    return rcmb, rcmb + 1
Radial or vertical position of boundaries .
42,179
def fnames(self, names):
    """Set the first file names, keeping the total number constant.

    At most ``len(self._fnames)`` entries are taken from ``names``;
    the remaining entries of ``self._fnames`` are left untouched.
    """
    head = list(names[:len(self._fnames)])
    self._fnames = head + self._fnames[len(head):]
Ensure constant size of fnames
42,180
def get_time_series(sdat, var, tstart, tend):
    """Extract or compute a time series and rescale it.

    Returns a (series, time, meta) tuple, where ``time`` is None when
    the series shares the time vector of the tseries table.

    Raises:
        UnknownTimeVarError: if ``var`` is not a known time variable.
    """
    tseries = sdat.tseries_between(tstart, tend)
    time = None
    if var in tseries.columns:
        series = tseries[var]
        if var in phyvars.TIME:
            meta = phyvars.TIME[var]
        else:
            meta = phyvars.Vart(var, None, '1')
    elif var in phyvars.TIME_EXTRA:
        meta = phyvars.TIME_EXTRA[var]
        series, time = meta.description(sdat, tstart, tend)
        meta = phyvars.Vart(misc.baredoc(meta.description),
                            meta.kind, meta.dim)
    else:
        raise UnknownTimeVarError(var)
    series, _ = sdat.scale(series, meta.dim)
    if time is not None:
        time, _ = sdat.scale(time, 's')
    return series, time, meta
Extract or compute and rescale a time series .
42,181
def plot_time_series(sdat, lovs):
    """Plot the requested time series.

    One series is extracted per variable appearing in ``lovs``; the
    time vector 't' is always added.  Actual plotting is delegated to
    _plot_time_list.
    """
    tstart, tend = conf.time.tstart, conf.time.tend
    series_d = {}
    times_d = {}
    metas_d = {}
    for tvar in misc.set_of_vars(lovs):
        series_d[tvar], time, metas_d[tvar] = get_time_series(
            sdat, tvar, tstart, tend)
        if time is not None:
            times_d[tvar] = time
    series_d['t'] = get_time_series(sdat, 't', tstart, tend)[0]
    _plot_time_list(sdat, lovs, series_d, metas_d, times_d)
Plot requested time series .
42,182
def compstat(sdat, tstart=None, tend=None):
    """Compute time statistics from series output by StagYY.

    The time average and RMS of every column (except time itself) are
    written to a 'statistics.dat' output file.
    """
    table = sdat.tseries_between(tstart, tend)
    time = table['t'].values
    delta_time = time[-1] - time[0]
    values = table.iloc[:, 1:].values
    mean = np.trapz(values, x=time, axis=0) / delta_time
    rms = np.sqrt(np.trapz((values - mean)**2, x=time, axis=0) / delta_time)
    with open(misc.out_name('statistics.dat'), 'w') as out_file:
        for stat in (mean, rms):
            stat.tofile(out_file, sep=' ', format="%10.5e")
            out_file.write('\n')
Compute statistics from series output by StagYY .
42,183
def cmd():
    """Implementation of the time subcommand.

    Applies the configured time fraction/bounds, plots the requested
    series, and optionally computes statistics.
    """
    sdat = StagyyData(conf.core.path)
    if sdat.tseries is None:
        return
    fraction = conf.time.fraction
    if fraction is not None:
        if not 0 < fraction <= 1:
            raise InvalidTimeFractionError(fraction)
        conf.time.tend = None
        t_0 = sdat.tseries.iloc[0].loc['t']
        t_f = sdat.tseries.iloc[-1].loc['t']
        # keep only the last ``fraction`` of the run
        conf.time.tstart = t_0 * fraction + t_f * (1 - fraction)
    lovs = misc.list_of_vars(conf.time.plot)
    if lovs:
        plot_time_series(sdat, lovs)
    if conf.time.compstat:
        compstat(sdat, conf.time.tstart, conf.time.tend)
Implementation of time subcommand .
42,184
def info_cmd():
    """Print basic information about a StagYY run."""
    sdat = stagyydata.StagyyData(conf.core.path)
    lsnap = sdat.snaps.last
    lstep = sdat.steps.last
    geom = lsnap.geom
    print('StagYY run in {}'.format(sdat.path))
    # grid dimensions restricted to the simulated plane(s)
    if geom.threed:
        dimension = '{} x {} x {}'.format(geom.nxtot, geom.nytot, geom.nztot)
    elif geom.twod_xz:
        dimension = '{} x {}'.format(geom.nxtot, geom.nztot)
    else:
        dimension = '{} x {}'.format(geom.nytot, geom.nztot)
    if geom.cartesian:
        shape = 'Cartesian'
    elif geom.cylindrical:
        shape = 'Cylindrical'
    else:
        shape = 'Spherical'
    print(shape, dimension)
    print('Last timestep:',
          ' istep: {}'.format(lstep.istep),
          ' time: {}'.format(lstep.timeinfo['t']),
          ' <T>: {}'.format(lstep.timeinfo['Tmean']),
          sep='\n')
    print('Last snapshot (istep {}):'.format(lsnap.istep),
          ' isnap: {}'.format(lsnap.isnap),
          ' time: {}'.format(lsnap.timeinfo['t']),
          ' output fields: {}'.format(','.join(lsnap.fields)),
          sep='\n')
Print basic information about StagYY run .
42,185
def var_cmd():
    """Print a list of available variables.

    Without any option, all sections are listed; otherwise only the
    requested ones.
    """
    print_all = not any(val for _, val in conf.var.opt_vals_())
    # (requested, title, variables, extra variables, blank line after)
    sections = (
        (conf.var.field, 'field:', phyvars.FIELD, phyvars.FIELD_EXTRA, True),
        (conf.var.sfield, 'surface field:', phyvars.SFIELD, {}, True),
        (conf.var.rprof, 'rprof:', phyvars.RPROF, phyvars.RPROF_EXTRA, True),
        (conf.var.time, 'time:', phyvars.TIME, phyvars.TIME_EXTRA, True),
        (conf.var.plates, 'plates:', phyvars.PLATES, {}, False),
    )
    for wanted, title, pvars, pvars_extra, blank_after in sections:
        if print_all or wanted:
            print(title)
            _layout(pvars, pvars_extra)
            if blank_after:
                print()
Print a list of available variables .
42,186
def report_parsing_problems(parsing_out):
    """Output a message about potential config parsing problems.

    ``parsing_out`` is the (config, empty files, faulty files) tuple
    produced by the config parser.
    """
    _, empty, faulty = parsing_out
    if CONFIG_FILE in empty or CONFIG_FILE in faulty:
        print('Unable to read global config file', CONFIG_FILE,
              file=sys.stderr)
        print('Please run stagpy config --create',
              sep='\n', end='\n\n', file=sys.stderr)
    if CONFIG_LOCAL in faulty:
        print('Unable to read local config file', CONFIG_LOCAL,
              file=sys.stderr)
        print('Please run stagpy config --create_local',
              sep='\n', end='\n\n', file=sys.stderr)
Output message about potential parsing problems .
42,187
def config_pp(subs):
    """Pretty print the configuration options of the given sections.

    Options available only on the command line are flagged with (c),
    those available only in the config file with (f).
    """
    print('(c|f): available only as CLI argument/in the config file',
          end='\n\n')
    for sub in subs:
        entries = []
        for opt, meta in conf[sub].defaults_():
            # flag options not available through both channels
            if meta.cmd_arg ^ meta.conf_arg:
                opt += ' (c)' if meta.cmd_arg else ' (f)'
            entries.append((opt, meta.help))
        if entries:
            print('{}:'.format(sub))
            _pretty_print(entries, sep=' -- ',
                          text_width=min(get_terminal_size().columns, 100))
            print()
Pretty print of configuration options .
42,188
def config_cmd():
    """Configuration handling.

    Pretty-prints all options when no specific config action was
    requested, then delegates to loam's generic handler.
    """
    no_action = not (conf.common.config or conf.config.create
                     or conf.config.create_local or conf.config.update
                     or conf.config.edit)
    if no_action:
        config_pp(conf.sections_())
    loam.tools.config_cmd_handler(conf)
Configuration handling .
42,189
def get_rprof(step, var):
    """Extract or compute a radial profile and rescale it.

    Returns a (rprof, rad, meta) tuple, where ``rad`` is None when the
    profile lives on the standard radial grid of the step.

    Raises:
        UnknownRprofVarError: if ``var`` is not a known profile variable.
    """
    rad = None
    if var in step.rprof.columns:
        rprof = step.rprof[var]
        if var in phyvars.RPROF:
            meta = phyvars.RPROF[var]
        else:
            meta = phyvars.Varr(var, None, '1')
    elif var in phyvars.RPROF_EXTRA:
        meta = phyvars.RPROF_EXTRA[var]
        rprof, rad = meta.description(step)
        meta = phyvars.Varr(misc.baredoc(meta.description),
                            meta.kind, meta.dim)
    else:
        raise UnknownRprofVarError(var)
    rprof, _ = step.sdat.scale(rprof, meta.dim)
    if rad is not None:
        rad, _ = step.sdat.scale(rad, 'm')
    return rprof, rad, meta
Extract or compute and rescale requested radial profile .
42,190
def plot_grid(step):
    """Plot cell radial positions and thicknesses of a step."""
    rad = get_rprof(step, 'r')[0]
    drad = get_rprof(step, 'dr')[0]
    _, unit = step.sdat.scale(1, 'm')
    unit_lbl = ' ({})'.format(unit) if unit else unit
    fig, (ax_pos, ax_thick) = plt.subplots(2, sharex=True)
    ax_pos.plot(rad, '-ko')
    ax_pos.set_ylabel('$r$' + unit_lbl)
    ax_thick.plot(drad, '-ko')
    ax_thick.set_ylabel('$dr$' + unit_lbl)
    ax_thick.set_xlim([-0.5, len(rad) - 0.5])
    ax_thick.set_xlabel('Cell number')
    misc.saveplot(fig, 'grid', step.istep)
Plot cell position and thickness .
42,191
def plot_average(sdat, lovs):
    """Plot time-averaged radial profiles.

    Profiles are accumulated over every step of the rprof walk filter,
    averaged, then passed to _plot_rprof_list.  Nothing is plotted when
    the walk yields no step.
    """
    steps_iter = iter(sdat.walk.filter(rprof=True))
    try:
        step = next(steps_iter)
    except StopIteration:
        return

    sovs = misc.set_of_vars(lovs)
    istart = step.istep
    nprofs = 1
    averaged = {}
    rads = {}
    metas = {}
    # seed the accumulators with the first step
    for rvar in sovs:
        averaged[rvar], rad, metas[rvar] = get_rprof(step, rvar)
        if rad is not None:
            rads[rvar] = rad

    for step in steps_iter:
        nprofs += 1
        for rvar in sovs:
            averaged[rvar] += get_rprof(step, rvar)[0]
    ilast = step.istep

    for rvar in sovs:
        averaged[rvar] /= nprofs
    # physical positions of domain boundaries, from the last step
    rcmb, rsurf = misc.get_rbounds(step)
    averaged['bounds'] = (step.sdat.scale(rcmb, 'm')[0],
                          step.sdat.scale(rsurf, 'm')[0])
    averaged['r'] = get_rprof(step, 'r')[0] + averaged['bounds'][0]

    stepstr = '{}_{}'.format(istart, ilast)
    _plot_rprof_list(sdat, lovs, averaged, metas, stepstr, rads)
Plot time averaged profiles .
42,192
def plot_every_step(sdat, lovs):
    """Plot radial profiles at each time step.

    For each step of the rprof walk filter, the requested profiles are
    extracted and handed to _plot_rprof_list.
    """
    sovs = misc.set_of_vars(lovs)
    for step in sdat.walk.filter(rprof=True):
        rprofs = {}
        rads = {}
        metas = {}
        for rvar in sovs:
            rprof, rad, meta = get_rprof(step, rvar)
            rprofs[rvar] = rprof
            metas[rvar] = meta
            if rad is not None:
                rads[rvar] = rad
        # Scaled physical positions of the domain boundaries.  The
        # original code first stored the unscaled misc.get_rbounds(step)
        # tuple in rprofs['bounds'] and immediately overwrote it; that
        # dead store is removed here.
        rcmb, rsurf = misc.get_rbounds(step)
        rprofs['bounds'] = (step.sdat.scale(rcmb, 'm')[0],
                            step.sdat.scale(rsurf, 'm')[0])
        rprofs['r'] = get_rprof(step, 'r')[0] + rprofs['bounds'][0]
        stepstr = str(step.istep)
        _plot_rprof_list(sdat, lovs, rprofs, metas, stepstr, rads)
Plot profiles at each time step .
42,193
def cmd():
    """Implementation of the rprof subcommand.

    Optionally plots the radial grid, then the requested profiles,
    either time-averaged or one figure per step.
    """
    sdat = StagyyData(conf.core.path)
    if sdat.rprof is None:
        return
    if conf.rprof.grid:
        for step in sdat.walk.filter(rprof=True):
            plot_grid(step)
    lovs = misc.list_of_vars(conf.rprof.plot)
    if not lovs:
        return
    plotter = plot_average if conf.rprof.average else plot_every_step
    plotter(sdat, lovs)
Implementation of rprof subcommand .
42,194
def main():
    """StagPy entry point.

    Parses command line arguments and runs the selected subcommand,
    turning StagpyErrors into a friendly message unless DEBUG is set.
    """
    if not DEBUG:
        signal.signal(signal.SIGINT, sigint_handler)
        warnings.simplefilter('ignore')
    # deferred imports so signal/warning setup happens first
    args = importlib.import_module('stagpy.args')
    error = importlib.import_module('stagpy.error')
    try:
        args.parse_args()()
    except error.StagpyError as err:
        if DEBUG:
            raise
        print('Oops! StagPy encountered the following problem while '
              'processing your request.',
              'Please check the path to your simulation and the command line '
              'arguments.',
              '',
              '{}: {}'.format(err.__class__.__name__, err),
              sep='\n', file=sys.stderr)
        sys.exit()
StagPy entry point
42,195
def _enrich_with_par(par_nml, par_file):
    """Merge the content of a par file into the ``par_nml`` namelist.

    String values are stripped of surrounding whitespace; sections
    missing from ``par_nml`` are created.
    """
    par_new = f90nml.read(str(par_file))
    for section, content in par_new.items():
        if section not in par_nml:
            par_nml[section] = {}
        for par, value in content.items():
            # strip string parameters, leave other types untouched
            try:
                content[par] = value.strip()
            except AttributeError:
                pass
        par_nml[section].update(content)
Enrich a par namelist with the content of a file .
42,196
def readpar(par_file, root):
    """Read a StagYY par file.

    Parameters are merged in increasing priority order: built-in
    defaults, the default par file referenced by the main one, the main
    par file itself, then parameter files written by StagYY in the
    output locations.

    Args:
        par_file: path of the main par file.
        root: directory against which relative paths are resolved.

    Returns:
        the merged namelist of parameters.

    Raises:
        NoParFileError: if a required par file does not exist.
    """
    par_nml = deepcopy(PAR_DEFAULT)
    if PAR_DFLT_FILE.is_file():
        _enrich_with_par(par_nml, PAR_DFLT_FILE)
    else:
        # write the defaults out so the user can inspect/edit them
        PAR_DFLT_FILE.parent.mkdir(exist_ok=True)
        f90nml.write(par_nml, str(PAR_DFLT_FILE))
    if not par_file.is_file():
        raise NoParFileError(par_file)
    par_main = f90nml.read(str(par_file))
    if 'default_parameters_parfile' in par_main:
        # the main par file may point at a secondary file of defaults
        par_dflt = par_main['default_parameters_parfile'].get(
            'par_name_defaultparameters', 'par_defaults')
        par_dflt = root / par_dflt
        if not par_dflt.is_file():
            raise NoParFileError(par_dflt)
        _enrich_with_par(par_nml, par_dflt)
    _enrich_with_par(par_nml, par_file)
    # parameters actually used by the run, if already written by StagYY
    # NOTE(review): '_parameters.dat' is joined as a separate path
    # component here -- confirm this matches the StagYY output layout.
    par_out = root / par_nml['ioin']['output_file_stem'] / '_parameters.dat'
    if par_out.is_file():
        _enrich_with_par(par_nml, par_out)
    par_out = root / par_nml['ioin']['hdf5_output_folder'] / 'parameters.dat'
    if par_out.is_file():
        _enrich_with_par(par_nml, par_out)
    return par_nml
Read StagYY par file .
42,197
def get_meshes_fld(step, var):
    """Return a scalar field along with its 2D coordinate meshes.

    Meshes and field are restricted to the simulated 2D plane
    (x-z, cartesian y-z, or the remaining annulus case).
    """
    fld = step.fields[var]
    geom = step.geom
    if geom.twod_xz:
        xmesh = geom.x_mesh[:, 0, :]
        ymesh = geom.z_mesh[:, 0, :]
        fld = fld[:, 0, :, 0]
    elif geom.cartesian and geom.twod_yz:
        xmesh = geom.y_mesh[0, :, :]
        ymesh = geom.z_mesh[0, :, :]
        fld = fld[0, :, :, 0]
    else:
        xmesh = geom.x_mesh[0, :, :]
        ymesh = geom.y_mesh[0, :, :]
        fld = fld[0, :, :, 0]
    return xmesh, ymesh, fld
Return scalar field along with coordinates meshes .
42,198
def get_meshes_vec(step, var):
    """Return in-plane vector components along with coordinate meshes.

    In the annulus geometry, the (phi, r) components are projected onto
    cartesian (x, y) components.
    """
    geom = step.geom
    if geom.twod_xz:
        xmesh = geom.x_mesh[:, 0, :]
        ymesh = geom.z_mesh[:, 0, :]
        vec1 = step.fields[var + '1'][:, 0, :, 0]
        vec2 = step.fields[var + '3'][:, 0, :, 0]
    elif geom.cartesian and geom.twod_yz:
        xmesh = geom.y_mesh[0, :, :]
        ymesh = geom.z_mesh[0, :, :]
        vec1 = step.fields[var + '2'][0, :, :, 0]
        vec2 = step.fields[var + '3'][0, :, :, 0]
    else:
        # annulus: rotate (phi, r) components into (x, y)
        xmesh = geom.x_mesh[0, :, :]
        ymesh = geom.y_mesh[0, :, :]
        pmesh = geom.p_mesh[0, :, :]
        vec_phi = step.fields[var + '2'][0, :, :, 0]
        vec_r = step.fields[var + '3'][0, :, :, 0]
        vec1 = vec_r * np.cos(pmesh) - vec_phi * np.sin(pmesh)
        vec2 = vec_phi * np.cos(pmesh) + vec_r * np.sin(pmesh)
    return xmesh, ymesh, vec1, vec2
Return vector field components along with coordinates meshes .
42,199
def set_of_vars(arg_plot):
    """Build the set of (field, secondary) variable pairs to plot."""
    sovs = set()
    for var in arg_plot.split(','):
        # 'a+b' -> ('a', 'b'); a bare 'a' -> ('a', '')
        sovs.add(tuple((var + '+').split('+')[:2]))
    sovs.discard(('', ''))
    return sovs
Build set of needed field variables .