idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
34,200 | def round ( self , value_array ) : value = value_array [ 0 ] rounded_value = self . domain [ 0 ] for domain_value in self . domain : if np . abs ( domain_value - value ) < np . abs ( rounded_value - value ) : rounded_value = domain_value return [ rounded_value ] | Rounds a discrete variable by selecting the closest point in the domain Assumes an 1d array with a single element as an input . |
34,201 | def optimize ( self , f = None , df = None , f_df = None , duplicate_manager = None ) : self . f = f self . df = df self . f_df = f_df self . optimizer = choose_optimizer ( self . optimizer_name , self . context_manager . noncontext_bounds ) if self . type_anchor_points_logic == max_objective_anchor_points_logic : anch... | Optimizes the input function . |
34,202 | def evaluate ( self , x ) : if self . n_procs == 1 : f_evals , cost_evals = self . _eval_func ( x ) else : try : f_evals , cost_evals = self . _syncronous_batch_evaluation ( x ) except : if not hasattr ( self , 'parallel_error' ) : print ( 'Error in parallel computation. Fall back to single process!' ) else : self . pa... | Performs the evaluation of the objective at x . |
34,203 | def _syncronous_batch_evaluation ( self , x ) : from multiprocessing import Process , Pipe divided_samples = [ x [ i : : self . n_procs ] for i in range ( self . n_procs ) ] pipe = [ Pipe ( ) for i in range ( self . n_procs ) ] proc = [ Process ( target = spawn ( self . _eval_func ) , args = ( c , k ) ) for k , ( p , c... | Evaluates the function a x where x can be a single location or a batch . The evaluation is performed in parallel according to the number of accessible cores . |
34,204 | def compute_batch ( self , duplicate_manager = None , context_manager = None ) : x , _ = self . acquisition . optimize ( duplicate_manager = duplicate_manager ) return x | Selects the new location to evaluate the objective . |
34,205 | def _compute_acq ( self , x ) : means , stds = self . model . predict ( x ) f_acqu = 0 for m , s in zip ( means , stds ) : f_acqu += - m + self . exploration_weight * s return f_acqu / ( len ( means ) ) | Integrated GP - Lower Confidence Bound |
34,206 | def _compute_acq_withGradients ( self , x ) : means , stds , dmdxs , dsdxs = self . model . predict_withGradients ( x ) f_acqu = None df_acqu = None for m , s , dmdx , dsdx in zip ( means , stds , dmdxs , dsdxs ) : f = - m + self . exploration_weight * s df = - dmdx + self . exploration_weight * dsdx if f_acqu is None ... | Integrated GP - Lower Confidence Bound and its derivative |
34,207 | def _expand_config_space ( self ) : self . config_space_expanded = [ ] for variable in self . config_space : variable_dic = variable . copy ( ) if 'dimensionality' in variable_dic . keys ( ) : dimensionality = variable_dic [ 'dimensionality' ] variable_dic [ 'dimensionality' ] = 1 variables_set = [ variable_dic . copy ... | Expands the config input space into a list of diccionaries one for each variable_dic in which the dimensionality is always one . |
34,208 | def _create_variables_dic ( self ) : self . name_to_variable = { } for variable in self . space_expanded : self . name_to_variable [ variable . name ] = variable | Returns the variable by passing its name |
34,209 | def _translate_space ( self , space ) : self . space = [ ] self . dimensionality = 0 self . has_types = d = { t : False for t in self . supported_types } for i , d in enumerate ( space ) : descriptor = deepcopy ( d ) descriptor [ 'name' ] = descriptor . get ( 'name' , 'var_' + str ( i ) ) descriptor [ 'type' ] = descri... | Translates a list of dictionaries into internal list of variables |
34,210 | def _expand_space ( self ) : self . _expand_config_space ( ) self . space_expanded = [ ] for variable in self . space : self . space_expanded += variable . expand ( ) | Creates an internal list where the variables with dimensionality larger than one are expanded . This list is the one that is used internally to do the optimization . |
34,211 | def objective_to_model ( self , x_objective ) : x_model = [ ] for k in range ( self . objective_dimensionality ) : variable = self . space_expanded [ k ] new_entry = variable . objective_to_model ( x_objective [ 0 , k ] ) x_model += new_entry return x_model | This function serves as interface between objective input vectors and model input vectors |
34,212 | def model_to_objective ( self , x_model ) : idx_model = 0 x_objective = [ ] for idx_obj in range ( self . objective_dimensionality ) : variable = self . space_expanded [ idx_obj ] new_entry = variable . model_to_objective ( x_model , idx_model ) x_objective += new_entry idx_model += variable . dimensionality_in_model r... | This function serves as interface between model input vectors and objective input vectors |
34,213 | def get_subspace ( self , dims ) : subspace = [ ] k = 0 for variable in self . space_expanded : if k in dims : subspace . append ( variable ) k += variable . dimensionality_in_model return subspace | Extracts subspace from the reference of a list of variables in the inputs of the model . |
34,214 | def indicator_constraints ( self , x ) : x = np . atleast_2d ( x ) I_x = np . ones ( ( x . shape [ 0 ] , 1 ) ) if self . constraints is not None : for d in self . constraints : try : exec ( 'constraint = lambda x:' + d [ 'constraint' ] , globals ( ) ) ind_x = ( constraint ( x ) <= 0 ) * 1 I_x *= ind_x . reshape ( x . s... | Returns array of ones and zeros indicating if x is within the constraints |
34,215 | def input_dim ( self ) : n_cont = len ( self . get_continuous_dims ( ) ) n_disc = len ( self . get_discrete_dims ( ) ) return n_cont + n_disc | Extracts the input dimension of the domain . |
34,216 | def round_optimum ( self , x ) : x = np . array ( x ) if not ( ( x . ndim == 1 ) or ( x . ndim == 2 and x . shape [ 0 ] == 1 ) ) : raise ValueError ( "Unexpected dimentionality of x. Got {}, expected (1, N) or (N,)" . format ( x . ndim ) ) if x . ndim == 2 : x = x [ 0 ] x_rounded = [ ] value_index = 0 for variable in s... | Rounds some value x to a feasible value in the design space . x is expected to be a vector or an array with a single row |
34,217 | def get_continuous_bounds ( self ) : bounds = [ ] for d in self . space : if d . type == 'continuous' : bounds . extend ( [ d . domain ] * d . dimensionality ) return bounds | Extracts the bounds of the continuous variables . |
34,218 | def get_continuous_dims ( self ) : continuous_dims = [ ] for i in range ( self . dimensionality ) : if self . space_expanded [ i ] . type == 'continuous' : continuous_dims += [ i ] return continuous_dims | Returns the dimension of the continuous components of the domain . |
34,219 | def get_discrete_grid ( self ) : sets_grid = [ ] for d in self . space : if d . type == 'discrete' : sets_grid . extend ( [ d . domain ] * d . dimensionality ) return np . array ( list ( itertools . product ( * sets_grid ) ) ) | Computes a Numpy array with the grid of points that results after crossing the possible outputs of the discrete variables |
34,220 | def get_discrete_dims ( self ) : discrete_dims = [ ] for i in range ( self . dimensionality ) : if self . space_expanded [ i ] . type == 'discrete' : discrete_dims += [ i ] return discrete_dims | Returns the dimension of the discrete components of the domain . |
34,221 | def get_bandit ( self ) : arms_bandit = [ ] for d in self . space : if d . type == 'bandit' : arms_bandit += tuple ( map ( tuple , d . domain ) ) return np . asarray ( arms_bandit ) | Extracts the arms of the bandit if any . |
34,222 | def predict ( self , X ) : X = np . atleast_2d ( X ) m = np . empty ( shape = ( 0 , 1 ) ) s = np . empty ( shape = ( 0 , 1 ) ) for k in range ( X . shape [ 0 ] ) : preds = [ ] for pred in self . model . estimators_ : preds . append ( pred . predict ( X [ k , : ] ) [ 0 ] ) m = np . vstack ( ( m , np . array ( preds ) . ... | Predictions with the model . Returns posterior means and standard deviations at X . |
34,223 | def _compute_acq ( self , x ) : means , stds = self . model . predict ( x ) fmins = self . model . get_fmin ( ) f_acqu = 0 for m , s , fmin in zip ( means , stds , fmins ) : _ , Phi , _ = get_quantiles ( self . jitter , fmin , m , s ) f_acqu += Phi return f_acqu / len ( means ) | Integrated Expected Improvement |
34,224 | def _compute_acq_withGradients ( self , x ) : means , stds , dmdxs , dsdxs = self . model . predict_withGradients ( x ) fmins = self . model . get_fmin ( ) f_acqu = None df_acqu = None for m , s , fmin , dmdx , dsdx in zip ( means , stds , fmins , dmdxs , dsdxs ) : phi , Phi , u = get_quantiles ( self . jitter , fmin ,... | Integrated Expected Improvement and its derivative |
34,225 | def plot_convergence ( Xdata , best_Y , filename = None ) : n = Xdata . shape [ 0 ] aux = ( Xdata [ 1 : n , : ] - Xdata [ 0 : n - 1 , : ] ) ** 2 distances = np . sqrt ( aux . sum ( axis = 1 ) ) plt . figure ( figsize = ( 10 , 5 ) ) plt . subplot ( 1 , 2 , 1 ) plt . plot ( list ( range ( n - 1 ) ) , distances , '-ro' ) ... | Plots to evaluate the convergence of standard Bayesian optimization algorithms |
34,226 | def compute_batch ( self , duplicate_manager = None , context_manager = None ) : from . . . acquisitions import AcquisitionLP assert isinstance ( self . acquisition , AcquisitionLP ) self . acquisition . update_batches ( None , None , None ) X_batch = self . acquisition . optimize ( ) [ 0 ] k = 1 if self . batch_size >... | Computes the elements of the batch sequentially by penalizing the acquisition . |
34,227 | def check_notebooks_for_errors ( notebooks_directory ) : print ( "Checking notebooks in directory {} for errors" . format ( notebooks_directory ) ) failed_notebooks_count = 0 for file in os . listdir ( notebooks_directory ) : if file . endswith ( ".ipynb" ) : print ( "Checking notebook " + file ) full_file_path = os . ... | Evaluates all notebooks in given directory and prints errors if any |
34,228 | def predict ( self , X , with_noise = True ) : m , v = self . _predict ( X , False , with_noise ) return m , np . sqrt ( v ) | Predictions with the model . Returns posterior means and standard deviations at X . Note that this is different in GPy where the variances are given . |
34,229 | def predict_covariance ( self , X , with_noise = True ) : _ , v = self . _predict ( X , True , with_noise ) return v | Predicts the covariance matric for points in X . |
34,230 | def predict_withGradients ( self , X ) : if X . ndim == 1 : X = X [ None , : ] m , v = self . model . predict ( X ) v = np . clip ( v , 1e-10 , np . inf ) dmdx , dvdx = self . model . predictive_gradients ( X ) dmdx = dmdx [ : , : , 0 ] dsdx = dvdx / ( 2 * np . sqrt ( v ) ) return m , np . sqrt ( v ) , dmdx , dsdx | Returns the mean standard deviation mean gradient and standard deviation gradient at X . |
34,231 | def predict ( self , X ) : if X . ndim == 1 : X = X [ None , : ] ps = self . model . param_array . copy ( ) means = [ ] stds = [ ] for s in self . hmc_samples : if self . model . _fixes_ is None : self . model [ : ] = s else : self . model [ self . model . _fixes_ ] = s self . model . _trigger_params_changed ( ) m , v ... | Predictions with the model for all the MCMC samples . Returns posterior means and standard deviations at X . Note that this is different in GPy where the variances are given . |
34,232 | def get_fmin ( self ) : ps = self . model . param_array . copy ( ) fmins = [ ] for s in self . hmc_samples : if self . model . _fixes_ is None : self . model [ : ] = s else : self . model [ self . model . _fixes_ ] = s self . model . _trigger_params_changed ( ) fmins . append ( self . model . predict ( self . model . X... | Returns the location where the posterior mean is takes its minimal value . |
34,233 | def predict_withGradients ( self , X ) : if X . ndim == 1 : X = X [ None , : ] ps = self . model . param_array . copy ( ) means = [ ] stds = [ ] dmdxs = [ ] dsdxs = [ ] for s in self . hmc_samples : if self . model . _fixes_ is None : self . model [ : ] = s else : self . model [ self . model . _fixes_ ] = s self . mode... | Returns the mean standard deviation mean gradient and standard deviation gradient at X for all the MCMC samples . |
34,234 | def load_objective ( config ) : assert 'prjpath' in config assert 'main-file' in config , "The problem file ('main-file') is missing!" os . chdir ( config [ 'prjpath' ] ) if config [ 'language' ] . lower ( ) == 'python' : assert config [ 'main-file' ] . endswith ( '.py' ) , 'The python problem file has to end with .py!... | Loads the objective function from a . json file . |
34,235 | def _compute_acq ( self , x ) : m , s = self . model . predict ( x ) fmin = self . model . get_fmin ( ) phi , Phi , u = get_quantiles ( self . jitter , fmin , m , s ) f_acqu = s * ( u * Phi + phi ) return f_acqu | Computes the Expected Improvement per unit of cost |
34,236 | def update_config ( config_new , config_default ) : if any ( [ isinstance ( v , dict ) for v in list ( config_new . values ( ) ) ] ) : for k , v in list ( config_new . items ( ) ) : if isinstance ( v , dict ) and k in config_default : update_config ( config_new [ k ] , config_default [ k ] ) else : config_default [ k ]... | Updates the loaded method configuration with default values . |
34,237 | def parser ( input_file_path = 'config.json' ) : try : with open ( input_file_path , 'r' ) as config_file : config_new = json . load ( config_file ) config_file . close ( ) except : raise Exception ( 'Config file "' + input_file_path + '" not loaded properly. Please check it an try again.' ) import copy options = updat... | Parser for the . json file containing the configuration of the method . |
34,238 | def get_samples ( self , n_samples , log_p_function , burn_in_steps = 50 ) : restarts = initial_design ( 'random' , self . space , n_samples ) sampler = emcee . EnsembleSampler ( n_samples , self . space . input_dim ( ) , log_p_function ) samples , samples_log , _ = sampler . run_mcmc ( restarts , burn_in_steps ) if le... | Generates samples . |
34,239 | def suggest_next_locations ( self , context = None , pending_X = None , ignored_X = None ) : self . model_parameters_iterations = None self . num_acquisitions = 0 self . context = context self . _update_model ( self . normalization_type ) suggested_locations = self . _compute_next_evaluations ( pending_zipped_X = pendi... | Run a single optimization step and return the next locations to evaluate the objective . Number of suggested locations equals to batch_size . |
34,240 | def _print_convergence ( self ) : if self . verbosity : if ( self . num_acquisitions == self . max_iter ) and ( not self . initial_iter ) : print ( ' ** Maximum number of iterations reached **' ) return 1 elif ( self . _distance_last_evaluations ( ) < self . eps ) and ( not self . initial_iter ) : print ( ' ** Two ... | Prints the reason why the optimization stopped . |
34,241 | def evaluate_objective ( self ) : self . Y_new , cost_new = self . objective . evaluate ( self . suggested_sample ) self . cost . update_cost_model ( self . suggested_sample , cost_new ) self . Y = np . vstack ( ( self . Y , self . Y_new ) ) | Evaluates the objective |
34,242 | def _compute_results ( self ) : self . Y_best = best_value ( self . Y ) self . x_opt = self . X [ np . argmin ( self . Y ) , : ] self . fx_opt = np . min ( self . Y ) | Computes the optimum and its value . |
34,243 | def _distance_last_evaluations ( self ) : if self . X . shape [ 0 ] < 2 : return np . inf return np . sqrt ( np . sum ( ( self . X [ - 1 , : ] - self . X [ - 2 , : ] ) ** 2 ) ) | Computes the distance between the last two evaluations . |
34,244 | def save_evaluations ( self , evaluations_file = None ) : iterations = np . array ( range ( 1 , self . Y . shape [ 0 ] + 1 ) ) [ : , None ] results = np . hstack ( ( iterations , self . Y , self . X ) ) header = [ 'Iteration' , 'Y' ] + [ 'var_' + str ( k ) for k in range ( 1 , self . X . shape [ 1 ] + 1 ) ] data = [ he... | Saves evaluations at each iteration of the optimization |
34,245 | def save_models ( self , models_file ) : if self . model_parameters_iterations is None : raise ValueError ( "No iterations have been carried out yet and hence no iterations of the BO can be saved" ) iterations = np . array ( range ( 1 , self . model_parameters_iterations . shape [ 0 ] + 1 ) ) [ : , None ] results = np ... | Saves model parameters at each iteration of the optimization |
34,246 | def _init_design_chooser ( self ) : if self . f is None and ( self . X is None or self . Y is None ) : raise InvalidConfigError ( "Initial data for both X and Y is required when objective function is not provided" ) if self . X is None : self . X = initial_design ( self . initial_design_type , self . space , self . ini... | Initializes the choice of X and Y based on the selected initial design and number of points selected . |
34,247 | def crontab ( minute = '*' , hour = '*' , day = '*' , month = '*' , day_of_week = '*' ) : validation = ( ( 'm' , month , range ( 1 , 13 ) ) , ( 'd' , day , range ( 1 , 32 ) ) , ( 'w' , day_of_week , range ( 8 ) ) , ( 'H' , hour , range ( 24 ) ) , ( 'M' , minute , range ( 60 ) ) ) cron_settings = [ ] for ( date_str , va... | Convert a crontab - style set of parameters into a test function that will return True when the given datetime matches the parameters set forth in the crontab . |
34,248 | def put_if_empty ( self , key , value ) : if self . has_data_for_key ( key ) : return False self . put_data ( key , value ) return True | Atomically write data only if the key is not already set . |
34,249 | def make_naive ( dt ) : tt = dt . utctimetuple ( ) ts = calendar . timegm ( tt ) local_tt = time . localtime ( ts ) return datetime . datetime ( * local_tt [ : 6 ] ) | Makes an aware datetime . datetime naive in local time zone . |
34,250 | def sleep_for_interval ( self , start_ts , nseconds ) : sleep_time = nseconds - ( time . time ( ) - start_ts ) if sleep_time <= 0 : return self . _logger . debug ( 'Sleeping for %s' , sleep_time ) sleep_time = nseconds - ( time . time ( ) - start_ts ) if sleep_time > 0 : time . sleep ( sleep_time ) | Sleep for a given interval with respect to the start timestamp . |
34,251 | def start ( self ) : if self . huey . immediate : raise ConfigurationError ( 'Consumer cannot be run with Huey instances where immediate ' 'is enabled. Please check your configuration and ensure that ' '"huey.immediate = False".' ) self . _logger . info ( 'Huey consumer started with %s %s, PID %s at %s' , self . worker... | Start all consumer processes and register signal handlers . |
34,252 | def stop ( self , graceful = False ) : self . stop_flag . set ( ) if graceful : self . _logger . info ( 'Shutting down gracefully...' ) try : for _ , worker_process in self . worker_threads : worker_process . join ( ) except KeyboardInterrupt : self . _logger . info ( 'Received request to shut down now.' ) else : self ... | Set the stop - flag . |
34,253 | def run ( self ) : self . start ( ) timeout = self . _stop_flag_timeout health_check_ts = time . time ( ) while True : try : self . stop_flag . wait ( timeout = timeout ) except KeyboardInterrupt : self . _logger . info ( 'Received SIGINT' ) self . stop ( graceful = True ) except : self . _logger . exception ( 'Error i... | Run the consumer . |
34,254 | def check_worker_health ( self ) : self . _logger . debug ( 'Checking worker health.' ) workers = [ ] restart_occurred = False for i , ( worker , worker_t ) in enumerate ( self . worker_threads ) : if not self . environment . is_alive ( worker_t ) : self . _logger . warning ( 'Worker %d died, restarting.' , i + 1 ) wor... | Check the health of the worker processes . Workers that have died will be replaced with new workers . |
34,255 | def close_db ( fn ) : @ wraps ( fn ) def inner ( * args , ** kwargs ) : try : return fn ( * args , ** kwargs ) finally : if not HUEY . immediate : close_old_connections ( ) return inner | Decorator to be used with tasks that may operate on the database . |
34,256 | def list ( self ) : mylist = [ ] for prod in self . product_list : if self . purchasable ( prod ) and not self . entitled ( prod ) : mylist . append ( prod ) return mylist | return list of purchasable and not entitled products |
34,257 | def _find_tide_info ( predictions ) : last_prediction = None first_high_tide = None second_high_tide = None low_tide = None first_tide_done = False for prediction in predictions : if last_prediction is None : last_prediction = prediction continue if last_prediction [ 'v' ] < prediction [ 'v' ] : if not first_tide_done ... | Algorithm to find the 2 high tides for the day the first of which is smaller and occurs mid - day the second of which is larger and typically in the evening . |
34,258 | def enqueue ( self , stream_url , offset = 0 , opaque_token = None ) : directive = self . _play_directive ( 'ENQUEUE' ) audio_item = self . _audio_item ( stream_url = stream_url , offset = offset , push_buffer = False , opaque_token = opaque_token ) audio_item [ 'stream' ] [ 'expectedPreviousToken' ] = current_stream .... | Adds stream to the queue . Does not impact the currently playing stream . |
34,259 | def play_next ( self , stream_url = None , offset = 0 , opaque_token = None ) : directive = self . _play_directive ( 'REPLACE_ENQUEUED' ) directive [ 'audioItem' ] = self . _audio_item ( stream_url = stream_url , offset = offset , opaque_token = opaque_token ) self . _response [ 'directives' ] . append ( directive ) re... | Replace all streams in the queue but does not impact the currently playing stream . |
34,260 | def resume ( self ) : directive = self . _play_directive ( 'REPLACE_ALL' ) directive [ 'audioItem' ] = self . _audio_item ( ) self . _response [ 'directives' ] . append ( directive ) return self | Sends Play Directive to resume playback at the paused offset |
34,261 | def _audio_item ( self , stream_url = None , offset = 0 , push_buffer = True , opaque_token = None ) : audio_item = { 'stream' : { } } stream = audio_item [ 'stream' ] if not stream_url : stream [ 'url' ] = current_stream . url stream [ 'token' ] = current_stream . token stream [ 'offsetInMilliseconds' ] = current_stre... | Builds an AudioPlayer Directive s audioItem and updates current_stream |
34,262 | def clear_queue ( self , stop = False ) : directive = { } directive [ 'type' ] = 'AudioPlayer.ClearQueue' if stop : directive [ 'clearBehavior' ] = 'CLEAR_ALL' else : directive [ 'clearBehavior' ] = 'CLEAR_ENQUEUED' self . _response [ 'directives' ] . append ( directive ) return self | Clears queued streams and optionally stops current stream . |
34,263 | def find_ask ( ) : if hasattr ( current_app , 'ask' ) : return getattr ( current_app , 'ask' ) else : if hasattr ( current_app , 'blueprints' ) : blueprints = getattr ( current_app , 'blueprints' ) for blueprint_name in blueprints : if hasattr ( blueprints [ blueprint_name ] , 'ask' ) : return getattr ( blueprints [ bl... | Find our instance of Ask navigating Local s and possible blueprints . |
34,264 | def init_app ( self , app , path = 'templates.yaml' ) : if self . _route is None : raise TypeError ( "route is a required argument when app is not None" ) self . app = app app . ask = self app . add_url_rule ( self . _route , view_func = self . _flask_view_func , methods = [ 'POST' ] ) app . jinja_loader = ChoiceLoader... | Initializes Ask app by setting configuration variables loading templates and maps Ask route to a flask view . |
34,265 | def launch ( self , f ) : self . _launch_view_func = f @ wraps ( f ) def wrapper ( * args , ** kw ) : self . _flask_view_func ( * args , ** kw ) return f | Decorator maps a view function as the endpoint for an Alexa LaunchRequest and starts the skill . |
34,266 | def session_ended ( self , f ) : self . _session_ended_view_func = f @ wraps ( f ) def wrapper ( * args , ** kw ) : self . _flask_view_func ( * args , ** kw ) return f | Decorator routes Alexa SessionEndedRequest to the wrapped view function to end the skill . |
34,267 | def intent ( self , intent_name , mapping = { } , convert = { } , default = { } ) : def decorator ( f ) : self . _intent_view_funcs [ intent_name ] = f self . _intent_mappings [ intent_name ] = mapping self . _intent_converts [ intent_name ] = convert self . _intent_defaults [ intent_name ] = default @ wraps ( f ) def ... | Decorator routes an Alexa IntentRequest and provides the slot parameters to the wrapped function . |
34,268 | def default_intent ( self , f ) : self . _default_intent_view_func = f @ wraps ( f ) def wrapper ( * args , ** kw ) : self . _flask_view_func ( * args , ** kw ) return f | Decorator routes any Alexa IntentRequest that is not matched by any existing |
34,269 | def display_element_selected ( self , f ) : self . _display_element_selected_func = f @ wraps ( f ) def wrapper ( * args , ** kw ) : self . _flask_view_func ( * args , ** kw ) return f | Decorator routes Alexa Display . ElementSelected request to the wrapped view function . |
34,270 | def on_purchase_completed ( self , mapping = { 'payload' : 'payload' , 'name' : 'name' , 'status' : 'status' , 'token' : 'token' } , convert = { } , default = { } ) : def decorator ( f ) : self . _intent_view_funcs [ 'Connections.Response' ] = f self . _intent_mappings [ 'Connections.Response' ] = mapping self . _inten... | Decorator routes an Connections . Response to the wrapped function . |
34,271 | def run_aws_lambda ( self , event ) : self . app . config [ 'ASK_VERIFY_REQUESTS' ] = False enc , esc = sys . getfilesystemencoding ( ) , 'surrogateescape' def unicode_to_wsgi ( u ) : return u . encode ( enc , esc ) . decode ( 'iso-8859-1' ) environ = { k : unicode_to_wsgi ( v ) for k , v in os . environ . items ( ) } ... | Invoke the Flask Ask application from an AWS Lambda function handler . |
34,272 | def _parse_timestamp ( timestamp ) : if timestamp : try : return aniso8601 . parse_datetime ( timestamp ) except AttributeError : try : return datetime . utcfromtimestamp ( timestamp ) except : return datetime . utcfromtimestamp ( timestamp / 1000 ) raise ValueError ( 'Invalid timestamp value! Cannot parse from either ... | Parse a given timestamp value raising ValueError if None or Flasey |
34,273 | def _map_player_request_to_func ( self , player_request_type ) : view_func = self . _intent_view_funcs . get ( player_request_type , lambda : None ) argspec = inspect . getargspec ( view_func ) arg_names = argspec . args arg_values = self . _map_params_to_view_args ( player_request_type , arg_names ) return partial ( v... | Provides appropriate parameters to the on_playback functions . |
34,274 | def _map_purchase_request_to_func ( self , purchase_request_type ) : if purchase_request_type in self . _intent_view_funcs : view_func = self . _intent_view_funcs [ purchase_request_type ] else : raise NotImplementedError ( 'Request type "{}" not found and no default view specified.' . format ( purchase_request_type ) ... | Provides appropriate parameters to the on_purchase functions . |
34,275 | def push_stream ( cache , user_id , stream ) : stack = cache . get ( user_id ) if stack is None : stack = [ ] if stream : stack . append ( stream ) return cache . set ( user_id , stack ) return None | Push a stream onto the stream stack in cache . |
34,276 | def pop_stream ( cache , user_id ) : stack = cache . get ( user_id ) if stack is None : return None result = stack . pop ( ) if len ( stack ) == 0 : cache . delete ( user_id ) else : cache . set ( user_id , stack ) return result | Pop an item off the stack in the cache . If stack is empty after pop it deletes the stack . |
34,277 | def top_stream ( cache , user_id ) : if not user_id : return None stack = cache . get ( user_id ) if stack is None : return None return stack . pop ( ) | Peek at the top of the stack in the cache . |
34,278 | def wrap ( content , version , account = None ) : envelope = create_element ( 's:Envelope' , nsmap = ns_translation ) header = create_element ( 's:Header' ) requestserverversion = create_element ( 't:RequestServerVersion' , Version = version ) header . append ( requestserverversion ) if account : if account . access_ty... | Generate the necessary boilerplate XML for a raw SOAP request . The XML is specific to the server version . ExchangeImpersonation allows to act as the user we want to impersonate . |
34,279 | def discover ( email , credentials ) : log . debug ( 'Attempting autodiscover on email %s' , email ) if not isinstance ( credentials , Credentials ) : raise ValueError ( "'credentials' %r must be a Credentials instance" % credentials ) domain = get_domain ( email ) autodiscover_key = ( domain , credentials ) log . debu... | Performs the autodiscover dance and returns the primary SMTP address of the account and a Protocol on success . The autodiscover and EWS server might not be the same so we use a different Protocol to do the autodiscover request and return a hopefully - cached Protocol to the callee . |
34,280 | def to_server_timezone ( self , timezones , for_year ) : candidates = set ( ) for tz_id , tz_name , tz_periods , tz_transitions , tz_transitions_groups in timezones : candidate = self . from_server_timezone ( tz_periods , tz_transitions , tz_transitions_groups , for_year ) if candidate == self : log . debug ( 'Found ex... | Returns the Microsoft timezone ID corresponding to this timezone . There may not be a match at all and there may be multiple matches . If so we return a random timezone ID . |
34,281 | def decrease_poolsize ( self ) : if self . _session_pool_size <= 1 : raise SessionPoolMinSizeReached ( 'Session pool size cannot be decreased further' ) with self . _session_pool_lock : if self . _session_pool_size <= 1 : log . debug ( 'Session pool size was decreased in another thread' ) return log . warning ( 'Loweri... | Decreases the session pool size in response to error messages from the server requesting to rate - limit requests . We decrease by one session per call . |
34,282 | def is_iterable ( value , generators_allowed = False ) : if generators_allowed : if not isinstance ( value , string_types + ( bytes , ) ) and hasattr ( value , '__iter__' ) : return True else : if isinstance ( value , ( tuple , list , set ) ) : return True return False | Checks if value is a list - like object . Don t match generators and generator - like objects here by default because callers don t necessarily guarantee that they only iterate the value once . Take care to not match string types and bytes . |
34,283 | def chunkify ( iterable , chunksize ) : from . queryset import QuerySet if hasattr ( iterable , '__getitem__' ) and not isinstance ( iterable , QuerySet ) : for i in range ( 0 , len ( iterable ) , chunksize ) : yield iterable [ i : i + chunksize ] else : chunk = [ ] for i in iterable : chunk . append ( i ) if len ( chu... | Splits an iterable into chunks of size chunksize . The last chunk may be smaller than chunksize . |
34,284 | def peek ( iterable ) : from . queryset import QuerySet if isinstance ( iterable , QuerySet ) : raise ValueError ( 'Cannot peek on a QuerySet' ) if hasattr ( iterable , '__len__' ) : return not iterable , iterable try : first = next ( iterable ) except StopIteration : return True , iterable return False , itertools . c... | Checks if an iterable is empty and returns status and the rewinded iterable |
34,285 | def xml_to_str ( tree , encoding = None , xml_declaration = False ) : if xml_declaration and not encoding : raise ValueError ( "'xml_declaration' is not supported when 'encoding' is None" ) if encoding : return tostring ( tree , encoding = encoding , xml_declaration = True ) return tostring ( tree , encoding = text_typ... | Serialize an XML tree . Returns unicode if encoding is None . Otherwise we return encoded bytes . |
34,286 | def is_xml ( text ) : bom_len = len ( BOM_UTF8 ) if text [ : bom_len ] == BOM_UTF8 : return text [ bom_len : bom_len + 5 ] == b'<?xml' return text [ : 5 ] == b'<?xml' | Helper function . Lightweight test if response is an XML doc |
def call(self, items, additional_fields, shape):
    """Return all items that correspond to a list of IDs, in stable order.

    Fans the request out via ``self._pool_requests`` with
    ``self.get_payload`` as the payload builder.

    :param items: the item IDs to fetch
    :param additional_fields: extra fields to request for each item
    :param shape: the response shape to request
    """
    # Pass keyword arguments directly; the original ``**dict(...)`` wrapper
    # was a redundant round-trip through a throwaway dict.
    return self._pool_requests(
        payload_func=self.get_payload,
        items=items,
        additional_fields=additional_fields,
        shape=shape,
    )
34,288 | def call ( self , additional_fields , restriction , shape , depth , max_items , offset ) : from . folders import Folder roots = { f . root for f in self . folders } if len ( roots ) != 1 : raise ValueError ( 'FindFolder must be called with folders in the same root hierarchy (%r)' % roots ) root = roots . pop ( ) for el... | Find subfolders of a folder . |
34,289 | def call ( self , folders , additional_fields , shape ) : from . folders import Folder , DistinguishedFolderId , RootOfHierarchy folders_list = list ( folders ) for folder , elem in zip ( folders_list , self . _get_elements ( payload = self . get_payload ( folders = folders , additional_fields = additional_fields , sha... | Takes a folder ID and returns the full information for that folder . |
34,290 | def get_payload ( self , items ) : from . properties import ParentFolderId uploaditems = create_element ( 'm:%s' % self . SERVICE_NAME ) itemselement = create_element ( 'm:Items' ) uploaditems . append ( itemselement ) for parent_folder , data_str in items : item = create_element ( 't:Item' , CreateAction = 'CreateNew'... | Upload given items to given account |
34,291 | def find_items ( self , q , shape = ID_ONLY , depth = SHALLOW , additional_fields = None , order_fields = None , calendar_view = None , page_size = None , max_items = None , offset = 0 ) : if shape not in SHAPE_CHOICES : raise ValueError ( "'shape' %s must be one of %s" % ( shape , SHAPE_CHOICES ) ) if depth not in ITE... | Private method to call the FindItem service |
34,292 | def get_distinguished ( cls , account ) : if not cls . DISTINGUISHED_FOLDER_ID : raise ValueError ( 'Class %s must have a DISTINGUISHED_FOLDER_ID value' % cls ) folders = list ( FolderCollection ( account = account , folders = [ cls ( account = account , name = cls . DISTINGUISHED_FOLDER_ID , is_distinguished = True ) ... | Gets the distinguished folder for this folder class |
def folder_cls_from_folder_name(cls, folder_name, locale):
    """Return the folder class that matches a localized folder name.

    Searches the well-known folder classes on *cls* plus the module-level
    non-deletable folder classes, comparing case-insensitively against each
    class's localized names for *locale*.

    :param folder_name: the localized folder name to look up
    :param locale: the locale passed through to ``localized_names``
    :raises KeyError: if no folder class matches
    """
    # Lowercase once, outside the loop, instead of per candidate.
    name = folder_name.lower()
    for folder_cls in cls.WELLKNOWN_FOLDERS + NON_DELETEABLE_FOLDERS:
        if name in folder_cls.localized_names(locale):
            return folder_cls
    # Include the offending name so lookup failures are diagnosable
    # (the original raised a bare KeyError() with no message).
    raise KeyError(folder_name)
34,294 | def register ( cls , attr_name , attr_cls ) : if not cls . INSERT_AFTER_FIELD : raise ValueError ( 'Class %s is missing INSERT_AFTER_FIELD value' % cls ) try : cls . get_field_by_fieldname ( attr_name ) except InvalidField : pass else : raise ValueError ( "'%s' is already registered" % attr_name ) if not issubclass ( a... | Register a custom extended property in this item class so they can be accessed just like any other attribute |
34,295 | def attach ( self , attachments ) : if not is_iterable ( attachments , generators_allowed = True ) : attachments = [ attachments ] for a in attachments : if not a . parent_item : a . parent_item = self if self . id and not a . attachment_id : a . attach ( ) if a not in self . attachments : self . attachments . append (... | Add an attachment or a list of attachments to this item . If the item has already been saved the attachments will be created on the server immediately . If the item has not yet been saved the attachments will be created on the server the item is saved . |
34,296 | def detach ( self , attachments ) : if not is_iterable ( attachments , generators_allowed = True ) : attachments = [ attachments ] for a in attachments : if a . parent_item is not self : raise ValueError ( 'Attachment does not belong to this item' ) if self . id : a . detach ( ) if a in self . attachments : self . atta... | Remove an attachment or a list of attachments from this item . If the item has already been saved the attachments will be deleted on the server immediately . If the item has not yet been saved the attachments will simply not be created on the server the item is saved . |
def back_off_until(self):
    """Return the back-off deadline as a datetime, or None if none is set.

    An expired deadline is cleared (reset to None) as a side effect before
    returning.
    """
    # Unlocked fast path: no back-off is currently in effect.
    if self._back_off_until is None:
        return None
    with self._back_off_lock:
        until = self._back_off_until
        # Re-check under the lock; another thread may have cleared it.
        if until is None:
            return None
        if until < datetime.datetime.now():
            # The deadline has passed -- reset it and report no back-off.
            self._back_off_until = None
            return None
        return until
34,298 | def generate_map ( timeout = 10 ) : r = requests . get ( CLDR_WINZONE_URL , timeout = timeout ) if r . status_code != 200 : raise ValueError ( 'Unexpected response: %s' % r ) tz_map = { } for e in to_xml ( r . content ) . find ( 'windowsZones' ) . find ( 'mapTimezones' ) . findall ( 'mapZone' ) : for location in e . ge... | Helper method to update the map if the CLDR database is updated |
def run():
    """Command-line entry point: compare two or more sets of GO IDs.

    Best done using sections.
    """
    cli = CompareGOsCli()
    options = cli.kws
    cli.write(options.get('xlsx'), options.get('ofile'), options.get('verbose', False))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.