id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
242,000
SheffieldML/GPyOpt
GPyOpt/acquisitions/LP.py
AcquisitionLP.d_acquisition_function
def d_acquisition_function(self, x):
    """
    Returns the gradient of the acquisition function at x.
    """
    x = np.atleast_2d(x)

    # Scale factor induced by the transformation applied to the raw acquisition.
    if self.transform == 'softplus':
        raw = -self.acq.acquisition_function(x)[:, 0]
        scale = 1. / (np.log1p(np.exp(raw)) * (1. + np.exp(-raw)))
    elif self.transform == 'none':
        raw = -self.acq.acquisition_function(x)[:, 0]
        scale = 1. / raw
    else:
        scale = 1.

    # Both branches need the underlying acquisition gradient; the batch case
    # additionally subtracts the penalizer ("hammer") gradient.
    _, grad_acq_x = self.acq.acquisition_function_withGradients(x)
    if self.X_batch is None:
        return scale * grad_acq_x
    return scale * grad_acq_x - self._d_hammer_function(x, self.X_batch, self.r_x0, self.s_x0)
python
def d_acquisition_function(self, x): x = np.atleast_2d(x) if self.transform=='softplus': fval = -self.acq.acquisition_function(x)[:,0] scale = 1./(np.log1p(np.exp(fval))*(1.+np.exp(-fval))) elif self.transform=='none': fval = -self.acq.acquisition_function(x)[:,0] scale = 1./fval else: scale = 1. if self.X_batch is None: _, grad_acq_x = self.acq.acquisition_function_withGradients(x) return scale*grad_acq_x else: _, grad_acq_x = self.acq.acquisition_function_withGradients(x) return scale*grad_acq_x - self._d_hammer_function(x, self.X_batch, self.r_x0, self.s_x0)
[ "def", "d_acquisition_function", "(", "self", ",", "x", ")", ":", "x", "=", "np", ".", "atleast_2d", "(", "x", ")", "if", "self", ".", "transform", "==", "'softplus'", ":", "fval", "=", "-", "self", ".", "acq", ".", "acquisition_function", "(", "x", ...
Returns the gradient of the acquisition function at x.
[ "Returns", "the", "gradient", "of", "the", "acquisition", "function", "at", "x", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/LP.py#L112-L132
242,001
SheffieldML/GPyOpt
GPyOpt/acquisitions/LP.py
AcquisitionLP.acquisition_function_withGradients
def acquisition_function_withGradients(self, x):
    """
    Returns the acquisition function and its gradient evaluated at x.
    """
    return self.acquisition_function(x), self.d_acquisition_function(x)
python
def acquisition_function_withGradients(self, x): aqu_x = self.acquisition_function(x) aqu_x_grad = self.d_acquisition_function(x) return aqu_x, aqu_x_grad
[ "def", "acquisition_function_withGradients", "(", "self", ",", "x", ")", ":", "aqu_x", "=", "self", ".", "acquisition_function", "(", "x", ")", "aqu_x_grad", "=", "self", ".", "d_acquisition_function", "(", "x", ")", "return", "aqu_x", ",", "aqu_x_grad" ]
Returns the acquisition function and its gradient at x.
[ "Returns", "the", "acquisition", "function", "and", "its", "its", "gradient", "at", "x", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/LP.py#L134-L140
242,002
SheffieldML/GPyOpt
GPyOpt/acquisitions/base.py
AcquisitionBase.acquisition_function
def acquisition_function(self, x):
    """
    Takes an acquisition and weights it so the domain and cost are taken into account.
    """
    raw_acq = self._compute_acq(x)
    evaluation_cost, _ = self.cost_withGradients(x)
    # Zero out points that violate the constraints, then scale by cost.
    constrained_acq = raw_acq * self.space.indicator_constraints(x)
    return -constrained_acq / evaluation_cost
python
def acquisition_function(self,x): f_acqu = self._compute_acq(x) cost_x, _ = self.cost_withGradients(x) return -(f_acqu*self.space.indicator_constraints(x))/cost_x
[ "def", "acquisition_function", "(", "self", ",", "x", ")", ":", "f_acqu", "=", "self", ".", "_compute_acq", "(", "x", ")", "cost_x", ",", "_", "=", "self", ".", "cost_withGradients", "(", "x", ")", "return", "-", "(", "f_acqu", "*", "self", ".", "spa...
Takes an acquisition and weights it so the domain and cost are taken into account.
[ "Takes", "an", "acquisition", "and", "weights", "it", "so", "the", "domain", "and", "cost", "are", "taken", "into", "account", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/base.py#L33-L39
242,003
SheffieldML/GPyOpt
GPyOpt/acquisitions/base.py
AcquisitionBase.acquisition_function_withGradients
def acquisition_function_withGradients(self, x):
    """
    Takes an acquisition and its gradient and weights them so the domain
    and cost are taken into account.
    """
    raw_acq, raw_grad = self._compute_acq_withGradients(x)
    cost_x, cost_grad_x = self.cost_withGradients(x)
    weighted_acq = raw_acq / cost_x
    # Quotient rule for d/dx [f(x) / c(x)].
    weighted_grad = (raw_grad * cost_x - raw_acq * cost_grad_x) / (cost_x ** 2)
    indicator = self.space.indicator_constraints(x)
    return -weighted_acq * indicator, -weighted_grad * indicator
python
def acquisition_function_withGradients(self, x): f_acqu,df_acqu = self._compute_acq_withGradients(x) cost_x, cost_grad_x = self.cost_withGradients(x) f_acq_cost = f_acqu/cost_x df_acq_cost = (df_acqu*cost_x - f_acqu*cost_grad_x)/(cost_x**2) return -f_acq_cost*self.space.indicator_constraints(x), -df_acq_cost*self.space.indicator_constraints(x)
[ "def", "acquisition_function_withGradients", "(", "self", ",", "x", ")", ":", "f_acqu", ",", "df_acqu", "=", "self", ".", "_compute_acq_withGradients", "(", "x", ")", "cost_x", ",", "cost_grad_x", "=", "self", ".", "cost_withGradients", "(", "x", ")", "f_acq_c...
Takes an acquisition and its gradient and weights them so the domain and cost are taken into account.
[ "Takes", "an", "acquisition", "and", "it", "gradient", "and", "weights", "it", "so", "the", "domain", "and", "cost", "are", "taken", "into", "account", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/base.py#L42-L50
242,004
SheffieldML/GPyOpt
GPyOpt/util/general.py
reshape
def reshape(x, input_dim):
    '''
    Reshapes x into a matrix with input_dim columns
    '''
    x = np.array(x)
    # A flat vector of exactly input_dim entries becomes a single row.
    if x.size == input_dim:
        return x.reshape((1, input_dim))
    return x
python
def reshape(x,input_dim): ''' Reshapes x into a matrix with input_dim columns ''' x = np.array(x) if x.size ==input_dim: x = x.reshape((1,input_dim)) return x
[ "def", "reshape", "(", "x", ",", "input_dim", ")", ":", "x", "=", "np", ".", "array", "(", "x", ")", "if", "x", ".", "size", "==", "input_dim", ":", "x", "=", "x", ".", "reshape", "(", "(", "1", ",", "input_dim", ")", ")", "return", "x" ]
Reshapes x into a matrix with input_dim columns
[ "Reshapes", "x", "into", "a", "matrix", "with", "input_dim", "columns" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/general.py#L76-L84
242,005
SheffieldML/GPyOpt
GPyOpt/util/general.py
spawn
def spawn(f):
    '''
    Function for parallel evaluation of the acquisition function
    '''
    def fun(pipe, x):
        # Evaluate f and ship the result back through the pipe, then hang up.
        pipe.send(f(x))
        pipe.close()
    return fun
python
def spawn(f): ''' Function for parallel evaluation of the acquisition function ''' def fun(pipe,x): pipe.send(f(x)) pipe.close() return fun
[ "def", "spawn", "(", "f", ")", ":", "def", "fun", "(", "pipe", ",", "x", ")", ":", "pipe", ".", "send", "(", "f", "(", "x", ")", ")", "pipe", ".", "close", "(", ")", "return", "fun" ]
Function for parallel evaluation of the acquisition function
[ "Function", "for", "parallel", "evaluation", "of", "the", "acquisition", "function" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/general.py#L144-L151
242,006
SheffieldML/GPyOpt
GPyOpt/util/general.py
values_to_array
def values_to_array(input_values):
    '''
    Transforms values of int, float and tuple types into a 2d numpy array.

    :param input_values: tuple, numpy array, or a Python/numpy scalar.
    :return: 2d numpy array with the values.
    :raises TypeError: if the input type is not one of the supported kinds.
    '''
    if isinstance(input_values, tuple):
        return np.array(input_values).reshape(-1, 1)
    if isinstance(input_values, np.ndarray):
        return np.atleast_2d(input_values)
    # BUG FIX: the original condition ended with `or type(np.int64)`, which is
    # always truthy, so this branch accepted every type and the error path was
    # unreachable (and would have raised UnboundLocalError anyway).
    if isinstance(input_values, (int, float, np.number)):
        return np.atleast_2d(np.array(input_values))
    raise TypeError('Type to transform not recognized')
python
def values_to_array(input_values): ''' Transforms a values of int, float and tuples to a column vector numpy array ''' if type(input_values)==tuple: values = np.array(input_values).reshape(-1,1) elif type(input_values) == np.ndarray: values = np.atleast_2d(input_values) elif type(input_values)==int or type(input_values)==float or type(np.int64): values = np.atleast_2d(np.array(input_values)) else: print('Type to transform not recognized') return values
[ "def", "values_to_array", "(", "input_values", ")", ":", "if", "type", "(", "input_values", ")", "==", "tuple", ":", "values", "=", "np", ".", "array", "(", "input_values", ")", ".", "reshape", "(", "-", "1", ",", "1", ")", "elif", "type", "(", "inpu...
Transforms a values of int, float and tuples to a column vector numpy array
[ "Transforms", "a", "values", "of", "int", "float", "and", "tuples", "to", "a", "column", "vector", "numpy", "array" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/general.py#L168-L180
242,007
SheffieldML/GPyOpt
GPyOpt/util/general.py
merge_values
def merge_values(values1, values2):
    '''
    Merges two numpy arrays by calculating all possible combinations of rows
    '''
    left = values_to_array(values1)
    right = values_to_array(values2)

    # An empty side contributes nothing to the product; return the other one.
    if left.size == 0:
        return right
    if right.size == 0:
        return left

    # Cartesian product of rows, each pair concatenated horizontally.
    combined = [np.hstack((row_left, row_right))
                for row_left in left
                for row_right in right]
    return np.atleast_2d(combined)
python
def merge_values(values1,values2): ''' Merges two numpy arrays by calculating all possible combinations of rows ''' array1 = values_to_array(values1) array2 = values_to_array(values2) if array1.size == 0: return array2 if array2.size == 0: return array1 merged_array = [] for row_array1 in array1: for row_array2 in array2: merged_row = np.hstack((row_array1,row_array2)) merged_array.append(merged_row) return np.atleast_2d(merged_array)
[ "def", "merge_values", "(", "values1", ",", "values2", ")", ":", "array1", "=", "values_to_array", "(", "values1", ")", "array2", "=", "values_to_array", "(", "values2", ")", "if", "array1", ".", "size", "==", "0", ":", "return", "array2", "if", "array2", ...
Merges two numpy arrays by calculating all possible combinations of rows
[ "Merges", "two", "numpy", "arrays", "by", "calculating", "all", "possible", "combinations", "of", "rows" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/general.py#L183-L200
242,008
SheffieldML/GPyOpt
GPyOpt/util/general.py
normalize
def normalize(Y, normalization_type='stats'):
    """Normalize the vector Y using statistics or its range.

    :param Y: Row or column vector that you want to normalize.
    :param normalization_type: String specifying the kind of normalization
    to use. Options are 'stats' to use mean and standard deviation,
    or 'maxmin' to use the range of function values.
    :return Y_normalized: The normalized vector.
    """
    Y = np.asarray(Y, dtype=float)

    if np.max(Y.shape) != Y.size:
        raise NotImplementedError('Only 1-dimensional arrays are supported.')

    if normalization_type == 'stats':
        centered = Y - Y.mean()
        # A single point (or constant data) has zero std; skip the division
        # to avoid dividing by zero.
        sdev = Y.std()
        return centered / sdev if sdev > 0 else centered

    if normalization_type == 'maxmin':
        shifted = Y - Y.min()
        span = np.ptp(Y)
        if span > 0:
            shifted /= span
        # Map [0, 1] onto [-1, 1]: more natural for a zero-mean GP.
        return 2 * (shifted - 0.5)

    raise ValueError('Unknown normalization type: {}'.format(normalization_type))
python
def normalize(Y, normalization_type='stats'): Y = np.asarray(Y, dtype=float) if np.max(Y.shape) != Y.size: raise NotImplementedError('Only 1-dimensional arrays are supported.') # Only normalize with non null sdev (divide by zero). For only one # data point both std and ptp return 0. if normalization_type == 'stats': Y_norm = Y - Y.mean() std = Y.std() if std > 0: Y_norm /= std elif normalization_type == 'maxmin': Y_norm = Y - Y.min() y_range = np.ptp(Y) if y_range > 0: Y_norm /= y_range # A range of [-1, 1] is more natural for a zero-mean GP Y_norm = 2 * (Y_norm - 0.5) else: raise ValueError('Unknown normalization type: {}'.format(normalization_type)) return Y_norm
[ "def", "normalize", "(", "Y", ",", "normalization_type", "=", "'stats'", ")", ":", "Y", "=", "np", ".", "asarray", "(", "Y", ",", "dtype", "=", "float", ")", "if", "np", ".", "max", "(", "Y", ".", "shape", ")", "!=", "Y", ".", "size", ":", "rai...
Normalize the vector Y using statistics or its range. :param Y: Row or column vector that you want to normalize. :param normalization_type: String specifying the kind of normalization to use. Options are 'stats' to use mean and standard deviation, or 'maxmin' to use the range of function values. :return Y_normalized: The normalized vector.
[ "Normalize", "the", "vector", "Y", "using", "statistics", "or", "its", "range", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/general.py#L203-L234
242,009
SheffieldML/GPyOpt
GPyOpt/experiment_design/grid_design.py
GridDesign.get_samples
def get_samples(self, init_points_count):
    """
    This method may return less points than requested.
    The total number of generated points is the smallest closest integer of n^d
    to the selected amount of points.
    """
    init_points_count = self._adjust_init_points_count(init_points_count)
    samples = np.empty((init_points_count, self.space.dimensionality))

    # Non-continuous dimensions are filled at random.
    RandomDesign(self.space).fill_noncontinous_variables(samples)

    # Continuous dimensions get a regular grid.
    if self.space.has_continuous():
        grid = multigrid(self.space.get_continuous_bounds(), self.data_per_dimension)
        samples[:, self.space.get_continuous_dims()] = grid

    return samples
python
def get_samples(self, init_points_count): init_points_count = self._adjust_init_points_count(init_points_count) samples = np.empty((init_points_count, self.space.dimensionality)) # Use random design to fill non-continuous variables random_design = RandomDesign(self.space) random_design.fill_noncontinous_variables(samples) if self.space.has_continuous(): X_design = multigrid(self.space.get_continuous_bounds(), self.data_per_dimension) samples[:,self.space.get_continuous_dims()] = X_design return samples
[ "def", "get_samples", "(", "self", ",", "init_points_count", ")", ":", "init_points_count", "=", "self", ".", "_adjust_init_points_count", "(", "init_points_count", ")", "samples", "=", "np", ".", "empty", "(", "(", "init_points_count", ",", "self", ".", "space"...
This method may return less points than requested. The total number of generated points is the smallest closest integer of n^d to the selected amount of points.
[ "This", "method", "may", "return", "less", "points", "than", "requested", ".", "The", "total", "number", "of", "generated", "points", "is", "the", "smallest", "closest", "integer", "of", "n^d", "to", "the", "selected", "amount", "of", "points", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/experiment_design/grid_design.py#L26-L43
242,010
SheffieldML/GPyOpt
GPyOpt/experiment_design/random_design.py
RandomDesign.get_samples_with_constraints
def get_samples_with_constraints(self, init_points_count):
    """
    Draw random samples and only save those that satisfy constraints
    Finish when required number of samples is generated
    """
    collected = np.empty((0, self.space.dimensionality))

    # Keep drawing batches until enough feasible points have been gathered.
    while collected.shape[0] < init_points_count:
        candidates = self.get_samples_without_constraints(init_points_count)
        keep = (self.space.indicator_constraints(candidates) == 1).flatten()
        if keep.sum() > 0:
            collected = np.vstack((collected, candidates[keep, :]))

    return collected[0:init_points_count, :]
python
def get_samples_with_constraints(self, init_points_count): samples = np.empty((0, self.space.dimensionality)) while samples.shape[0] < init_points_count: domain_samples = self.get_samples_without_constraints(init_points_count) valid_indices = (self.space.indicator_constraints(domain_samples) == 1).flatten() if sum(valid_indices) > 0: valid_samples = domain_samples[valid_indices,:] samples = np.vstack((samples,valid_samples)) return samples[0:init_points_count,:]
[ "def", "get_samples_with_constraints", "(", "self", ",", "init_points_count", ")", ":", "samples", "=", "np", ".", "empty", "(", "(", "0", ",", "self", ".", "space", ".", "dimensionality", ")", ")", "while", "samples", ".", "shape", "[", "0", "]", "<", ...
Draw random samples and only save those that satisfy constraints Finish when required number of samples is generated
[ "Draw", "random", "samples", "and", "only", "save", "those", "that", "satisfy", "constraints", "Finish", "when", "required", "number", "of", "samples", "is", "generated" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/experiment_design/random_design.py#L21-L35
242,011
SheffieldML/GPyOpt
GPyOpt/experiment_design/random_design.py
RandomDesign.fill_noncontinous_variables
def fill_noncontinous_variables(self, samples):
    """
    Fill sample values to non-continuous variables in place
    """
    points_count = samples.shape[0]

    for (idx, var) in enumerate(self.space.space_expanded):
        if isinstance(var, (DiscreteVariable, CategoricalVariable)):
            # Independent uniform draws from the variable's domain.
            picks = np.atleast_2d(np.random.choice(var.domain, points_count))
            samples[:, idx] = picks.flatten()
        elif isinstance(var, BanditVariable):
            # A bandit variable spans several adjacent columns of `samples`;
            # pick whole rows of its domain at random.
            row_choice = np.random.randint(var.domain.shape[0], size=points_count)
            cols = np.arange(idx, idx + var.domain.shape[1])
            samples[:, cols] = var.domain[row_choice, :]
python
def fill_noncontinous_variables(self, samples): init_points_count = samples.shape[0] for (idx, var) in enumerate(self.space.space_expanded): if isinstance(var, DiscreteVariable) or isinstance(var, CategoricalVariable) : sample_var = np.atleast_2d(np.random.choice(var.domain, init_points_count)) samples[:,idx] = sample_var.flatten() # sample in the case of bandit variables elif isinstance(var, BanditVariable): # Bandit variable is represented by a several adjacent columns in the samples array idx_samples = np.random.randint(var.domain.shape[0], size=init_points_count) bandit_idx = np.arange(idx, idx + var.domain.shape[1]) samples[:, bandit_idx] = var.domain[idx_samples,:]
[ "def", "fill_noncontinous_variables", "(", "self", ",", "samples", ")", ":", "init_points_count", "=", "samples", ".", "shape", "[", "0", "]", "for", "(", "idx", ",", "var", ")", "in", "enumerate", "(", "self", ".", "space", ".", "space_expanded", ")", "...
Fill sample values to non-continuous variables in place
[ "Fill", "sample", "values", "to", "non", "-", "continuous", "variables", "in", "place" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/experiment_design/random_design.py#L37-L53
242,012
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver._get_obj
def _get_obj(self, space):
    """
    Builds the objective function task.

    NOTE(review): the original docstring said "Imports the acquisition
    function", but this method wraps ``self.obj_func`` (the objective) in a
    SingleObjective task — the docstring was corrected accordingly.
    """
    obj_func = self.obj_func
    from ..core.task import SingleObjective
    return SingleObjective(obj_func, self.config['resources']['cores'], space=space, unfold_args=True)
python
def _get_obj(self,space): obj_func = self.obj_func from ..core.task import SingleObjective return SingleObjective(obj_func, self.config['resources']['cores'], space=space, unfold_args=True)
[ "def", "_get_obj", "(", "self", ",", "space", ")", ":", "obj_func", "=", "self", ".", "obj_func", "from", ".", ".", "core", ".", "task", "import", "SingleObjective", "return", "SingleObjective", "(", "obj_func", ",", "self", ".", "config", "[", "'resources...
Imports the objective function.
[ "Imports", "the", "acquisition", "function", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L24-L32
242,013
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver._get_space
def _get_space(self):
    """
    Imports the domain.
    """
    assert 'space' in self.config, 'The search space is NOT configured!'

    space_cfg = self.config['space']
    constraints_cfg = self.config['constraints']

    from ..core.task.space import Design_space
    return Design_space.fromConfig(space_cfg, constraints_cfg)
python
def _get_space(self): assert 'space' in self.config, 'The search space is NOT configured!' space_config = self.config['space'] constraint_config = self.config['constraints'] from ..core.task.space import Design_space return Design_space.fromConfig(space_config, constraint_config)
[ "def", "_get_space", "(", "self", ")", ":", "assert", "'space'", "in", "self", ".", "config", ",", "'The search space is NOT configured!'", "space_config", "=", "self", ".", "config", "[", "'space'", "]", "constraint_config", "=", "self", ".", "config", "[", "...
Imports the domain.
[ "Imports", "the", "domain", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L34-L43
242,014
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver._get_model
def _get_model(self):
    """
    Imports the model.
    """
    from copy import deepcopy
    # Work on a copy so the driver's config is left untouched.
    model_config = deepcopy(self.config['model'])
    model_type = model_config.pop('type')

    from ..models import select_model
    return select_model(model_type).fromConfig(model_config)
python
def _get_model(self): from copy import deepcopy model_args = deepcopy(self.config['model']) del model_args['type'] from ..models import select_model return select_model(self.config['model']['type']).fromConfig(model_args)
[ "def", "_get_model", "(", "self", ")", ":", "from", "copy", "import", "deepcopy", "model_args", "=", "deepcopy", "(", "self", ".", "config", "[", "'model'", "]", ")", "del", "model_args", "[", "'type'", "]", "from", ".", ".", "models", "import", "select_...
Imports the model.
[ "Imports", "the", "model", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L45-L55
242,015
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver._get_acquisition
def _get_acquisition(self, model, space):
    """
    Imports the acquisition
    """
    from copy import deepcopy
    # Copy so that popping 'name' does not mutate the stored config.
    optimizer_config = deepcopy(self.config['acquisition']['optimizer'])
    optimizer_name = optimizer_config.pop('name')

    from ..optimization import AcquisitionOptimizer
    acq_optimizer = AcquisitionOptimizer(space, optimizer_name, **optimizer_config)

    from ..acquisitions import select_acquisition
    acquisition_cls = select_acquisition(self.config['acquisition']['type'])
    return acquisition_cls.fromConfig(model, space, acq_optimizer, None, self.config['acquisition'])
python
def _get_acquisition(self, model, space): from copy import deepcopy acqOpt_config = deepcopy(self.config['acquisition']['optimizer']) acqOpt_name = acqOpt_config['name'] del acqOpt_config['name'] from ..optimization import AcquisitionOptimizer acqOpt = AcquisitionOptimizer(space, acqOpt_name, **acqOpt_config) from ..acquisitions import select_acquisition return select_acquisition(self.config['acquisition']['type']).fromConfig(model, space, acqOpt, None, self.config['acquisition'])
[ "def", "_get_acquisition", "(", "self", ",", "model", ",", "space", ")", ":", "from", "copy", "import", "deepcopy", "acqOpt_config", "=", "deepcopy", "(", "self", ".", "config", "[", "'acquisition'", "]", "[", "'optimizer'", "]", ")", "acqOpt_name", "=", "...
Imports the acquisition
[ "Imports", "the", "acquisition" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L58-L71
242,016
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver._get_acq_evaluator
def _get_acq_evaluator(self, acq):
    """
    Imports the evaluator
    """
    from copy import deepcopy
    from ..core.evaluators import select_evaluator

    # Copy so that popping 'type' does not mutate the stored config.
    evaluator_config = deepcopy(self.config['acquisition']['evaluator'])
    evaluator_type = evaluator_config.pop('type')
    return select_evaluator(evaluator_type)(acq, **evaluator_config)
python
def _get_acq_evaluator(self, acq): from ..core.evaluators import select_evaluator from copy import deepcopy eval_args = deepcopy(self.config['acquisition']['evaluator']) del eval_args['type'] return select_evaluator(self.config['acquisition']['evaluator']['type'])(acq, **eval_args)
[ "def", "_get_acq_evaluator", "(", "self", ",", "acq", ")", ":", "from", ".", ".", "core", ".", "evaluators", "import", "select_evaluator", "from", "copy", "import", "deepcopy", "eval_args", "=", "deepcopy", "(", "self", ".", "config", "[", "'acquisition'", "...
Imports the evaluator
[ "Imports", "the", "evaluator" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L73-L82
242,017
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver._check_stop
def _check_stop(self, iters, elapsed_time, converged): """ Defines the stopping criterion. """ r_c = self.config['resources'] stop = False if converged==0: stop=True if r_c['maximum-iterations'] !='NA' and iters>= r_c['maximum-iterations']: stop = True if r_c['max-run-time'] != 'NA' and elapsed_time/60.>= r_c['max-run-time']: stop = True return stop
python
def _check_stop(self, iters, elapsed_time, converged): r_c = self.config['resources'] stop = False if converged==0: stop=True if r_c['maximum-iterations'] !='NA' and iters>= r_c['maximum-iterations']: stop = True if r_c['max-run-time'] != 'NA' and elapsed_time/60.>= r_c['max-run-time']: stop = True return stop
[ "def", "_check_stop", "(", "self", ",", "iters", ",", "elapsed_time", ",", "converged", ")", ":", "r_c", "=", "self", ".", "config", "[", "'resources'", "]", "stop", "=", "False", "if", "converged", "==", "0", ":", "stop", "=", "True", "if", "r_c", "...
Defines the stopping criterion.
[ "Defines", "the", "stopping", "criterion", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L84-L98
242,018
SheffieldML/GPyOpt
GPyOpt/interface/driver.py
BODriver.run
def run(self):
    """
    Runs the optimization using the previously loaded elements.
    """
    # Assemble all the pieces configured by the other _get_* helpers.
    space = self._get_space()
    objective = self._get_obj(space)
    model = self._get_model()
    acquisition = self._get_acquisition(model, space)
    evaluator = self._get_acq_evaluator(acquisition)

    from ..experiment_design import initial_design
    init_cfg = self.config['initialization']
    X_init = initial_design(init_cfg['type'], space, init_cfg['num-eval'])

    from ..methods import ModularBayesianOptimization
    bo = ModularBayesianOptimization(model, space, objective, acquisition, evaluator, X_init)

    resources = self.config['resources']
    # 'NA' means no wall-clock limit.
    max_time = np.inf if resources['max-run-time'] == "NA" else resources['max-run-time']
    bo.run_optimization(max_iter=resources['maximum-iterations'],
                        max_time=max_time,
                        eps=resources['tolerance'],
                        verbosity=True)
    return bo
python
def run(self): space = self._get_space() obj_func = self._get_obj(space) model = self._get_model() acq = self._get_acquisition(model, space) acq_eval = self._get_acq_evaluator(acq) from ..experiment_design import initial_design X_init = initial_design(self.config['initialization']['type'], space, self.config['initialization']['num-eval']) from ..methods import ModularBayesianOptimization bo = ModularBayesianOptimization(model, space, obj_func, acq, acq_eval, X_init) bo.run_optimization(max_iter = self.config['resources']['maximum-iterations'], max_time = self.config['resources']['max-run-time'] if self.config['resources']['max-run-time']!="NA" else np.inf, eps = self.config['resources']['tolerance'], verbosity=True) return bo
[ "def", "run", "(", "self", ")", ":", "space", "=", "self", ".", "_get_space", "(", ")", "obj_func", "=", "self", ".", "_get_obj", "(", "space", ")", "model", "=", "self", ".", "_get_model", "(", ")", "acq", "=", "self", ".", "_get_acquisition", "(", ...
Runs the optimization using the previously loaded elements.
[ "Runs", "the", "optimization", "using", "the", "previously", "loaded", "elements", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/driver.py#L100-L119
242,019
SheffieldML/GPyOpt
GPyOpt/optimization/optimizer.py
choose_optimizer
def choose_optimizer(optimizer_name, bounds):
    """
    Selects the type of local optimizer
    """
    # Dispatch table: optimizer name -> optimizer class.
    optimizers = {
        'lbfgs': OptLbfgs,
        'DIRECT': OptDirect,
        'CMA': OptCma,
    }
    if optimizer_name not in optimizers:
        raise InvalidVariableNameError('Invalid optimizer selected.')
    return optimizers[optimizer_name](bounds)
python
def choose_optimizer(optimizer_name, bounds): if optimizer_name == 'lbfgs': optimizer = OptLbfgs(bounds) elif optimizer_name == 'DIRECT': optimizer = OptDirect(bounds) elif optimizer_name == 'CMA': optimizer = OptCma(bounds) else: raise InvalidVariableNameError('Invalid optimizer selected.') return optimizer
[ "def", "choose_optimizer", "(", "optimizer_name", ",", "bounds", ")", ":", "if", "optimizer_name", "==", "'lbfgs'", ":", "optimizer", "=", "OptLbfgs", "(", "bounds", ")", "elif", "optimizer_name", "==", "'DIRECT'", ":", "optimizer", "=", "OptDirect", "(", "bou...
Selects the type of local optimizer
[ "Selects", "the", "type", "of", "local", "optimizer" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/optimization/optimizer.py#L238-L253
242,020
SheffieldML/GPyOpt
GPyOpt/util/arguments_manager.py
ArgumentsManager.evaluator_creator
def evaluator_creator(self, evaluator_type, acquisition, batch_size, model_type, model, space, acquisition_optimizer):
    """
    Acquisition chooser from the available options. Guide the optimization
    through sequential or parallel evaluations of the objective.

    :return: an evaluator instance, or None when batch_size > 1 and the
        evaluator_type is unrecognized (preserved from the original).
    """
    acquisition_transformation = self.kwargs.get('acquisition_transformation', 'none')

    if batch_size == 1 or evaluator_type == 'sequential':
        return Sequential(acquisition)
    if batch_size > 1 and (evaluator_type == 'random' or evaluator_type is None):
        return RandomBatch(acquisition, batch_size)
    if batch_size > 1 and evaluator_type == 'thompson_sampling':
        return ThompsonBatch(acquisition, batch_size)
    if evaluator_type == 'local_penalization':
        if model_type not in ['GP', 'sparseGP', 'GP_MCMC', 'warpedGP']:
            raise InvalidConfigError('local_penalization evaluator can only be used with GP models')
        # BUG FIX: the original only assigned `acquisition_lp` inside the
        # isinstance guard but then always passed it to LocalPenalization,
        # raising NameError whenever `acquisition` was already an AcquisitionLP.
        if not isinstance(acquisition, AcquisitionLP):
            acquisition = AcquisitionLP(model, space, acquisition_optimizer, acquisition, acquisition_transformation)
        return LocalPenalization(acquisition, batch_size)
python
def evaluator_creator(self, evaluator_type, acquisition, batch_size, model_type, model, space, acquisition_optimizer): acquisition_transformation = self.kwargs.get('acquisition_transformation','none') if batch_size == 1 or evaluator_type == 'sequential': return Sequential(acquisition) elif batch_size >1 and (evaluator_type == 'random' or evaluator_type is None): return RandomBatch(acquisition, batch_size) elif batch_size >1 and evaluator_type == 'thompson_sampling': return ThompsonBatch(acquisition, batch_size) elif evaluator_type == 'local_penalization': if model_type not in ['GP', 'sparseGP', 'GP_MCMC', 'warpedGP']: raise InvalidConfigError('local_penalization evaluator can only be used with GP models') if not isinstance(acquisition, AcquisitionLP): acquisition_lp = AcquisitionLP(model, space, acquisition_optimizer, acquisition, acquisition_transformation) return LocalPenalization(acquisition_lp, batch_size)
[ "def", "evaluator_creator", "(", "self", ",", "evaluator_type", ",", "acquisition", ",", "batch_size", ",", "model_type", ",", "model", ",", "space", ",", "acquisition_optimizer", ")", ":", "acquisition_transformation", "=", "self", ".", "kwargs", ".", "get", "(...
Acquisition chooser from the available options. Guide the optimization through sequential or parallel evalutions of the objective.
[ "Acquisition", "chooser", "from", "the", "available", "options", ".", "Guide", "the", "optimization", "through", "sequential", "or", "parallel", "evalutions", "of", "the", "objective", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/arguments_manager.py#L17-L38
242,021
SheffieldML/GPyOpt
GPyOpt/acquisitions/LCB.py
AcquisitionLCB._compute_acq
def _compute_acq(self, x): """ Computes the GP-Lower Confidence Bound """ m, s = self.model.predict(x) f_acqu = -m + self.exploration_weight * s return f_acqu
python
def _compute_acq(self, x): m, s = self.model.predict(x) f_acqu = -m + self.exploration_weight * s return f_acqu
[ "def", "_compute_acq", "(", "self", ",", "x", ")", ":", "m", ",", "s", "=", "self", ".", "model", ".", "predict", "(", "x", ")", "f_acqu", "=", "-", "m", "+", "self", ".", "exploration_weight", "*", "s", "return", "f_acqu" ]
Computes the GP-Lower Confidence Bound
[ "Computes", "the", "GP", "-", "Lower", "Confidence", "Bound" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/LCB.py#L31-L37
242,022
SheffieldML/GPyOpt
GPyOpt/acquisitions/LCB.py
AcquisitionLCB._compute_acq_withGradients
def _compute_acq_withGradients(self, x): """ Computes the GP-Lower Confidence Bound and its derivative """ m, s, dmdx, dsdx = self.model.predict_withGradients(x) f_acqu = -m + self.exploration_weight * s df_acqu = -dmdx + self.exploration_weight * dsdx return f_acqu, df_acqu
python
def _compute_acq_withGradients(self, x): m, s, dmdx, dsdx = self.model.predict_withGradients(x) f_acqu = -m + self.exploration_weight * s df_acqu = -dmdx + self.exploration_weight * dsdx return f_acqu, df_acqu
[ "def", "_compute_acq_withGradients", "(", "self", ",", "x", ")", ":", "m", ",", "s", ",", "dmdx", ",", "dsdx", "=", "self", ".", "model", ".", "predict_withGradients", "(", "x", ")", "f_acqu", "=", "-", "m", "+", "self", ".", "exploration_weight", "*",...
Computes the GP-Lower Confidence Bound and its derivative
[ "Computes", "the", "GP", "-", "Lower", "Confidence", "Bound", "and", "its", "derivative" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/LCB.py#L39-L46
242,023
SheffieldML/GPyOpt
GPyOpt/core/task/variables.py
create_variable
def create_variable(descriptor): """ Creates a variable from a dictionary descriptor """ if descriptor['type'] == 'continuous': return ContinuousVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', 1)) elif descriptor['type'] == 'bandit': return BanditVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', None)) # bandits variables cannot be repeated elif descriptor['type'] == 'discrete': return DiscreteVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', 1)) elif descriptor['type'] == 'categorical': return CategoricalVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', 1)) else: raise InvalidConfigError('Unknown variable type ' + descriptor['type'])
python
def create_variable(descriptor): if descriptor['type'] == 'continuous': return ContinuousVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', 1)) elif descriptor['type'] == 'bandit': return BanditVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', None)) # bandits variables cannot be repeated elif descriptor['type'] == 'discrete': return DiscreteVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', 1)) elif descriptor['type'] == 'categorical': return CategoricalVariable(descriptor['name'], descriptor['domain'], descriptor.get('dimensionality', 1)) else: raise InvalidConfigError('Unknown variable type ' + descriptor['type'])
[ "def", "create_variable", "(", "descriptor", ")", ":", "if", "descriptor", "[", "'type'", "]", "==", "'continuous'", ":", "return", "ContinuousVariable", "(", "descriptor", "[", "'name'", "]", ",", "descriptor", "[", "'domain'", "]", ",", "descriptor", ".", ...
Creates a variable from a dictionary descriptor
[ "Creates", "a", "variable", "from", "a", "dictionary", "descriptor" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/variables.py#L230-L243
242,024
SheffieldML/GPyOpt
GPyOpt/core/task/variables.py
Variable.expand
def expand(self): """ Builds a list of single dimensional variables representing current variable. Examples: For single dimensional variable, it is returned as is discrete of (0,2,4) -> discrete of (0,2,4) For multi dimensional variable, a list of variables is returned, each representing a single dimension continuous {0<=x<=1, 2<=y<=3} -> continuous {0<=x<=1}, continuous {2<=y<=3} """ expanded_variables = [] for i in range(self.dimensionality): one_d_variable = deepcopy(self) one_d_variable.dimensionality = 1 if self.dimensionality > 1: one_d_variable.name = '{}_{}'.format(self.name, i+1) else: one_d_variable.name = self.name one_d_variable.dimensionality_in_model = 1 expanded_variables.append(one_d_variable) return expanded_variables
python
def expand(self): expanded_variables = [] for i in range(self.dimensionality): one_d_variable = deepcopy(self) one_d_variable.dimensionality = 1 if self.dimensionality > 1: one_d_variable.name = '{}_{}'.format(self.name, i+1) else: one_d_variable.name = self.name one_d_variable.dimensionality_in_model = 1 expanded_variables.append(one_d_variable) return expanded_variables
[ "def", "expand", "(", "self", ")", ":", "expanded_variables", "=", "[", "]", "for", "i", "in", "range", "(", "self", ".", "dimensionality", ")", ":", "one_d_variable", "=", "deepcopy", "(", "self", ")", "one_d_variable", ".", "dimensionality", "=", "1", ...
Builds a list of single dimensional variables representing current variable. Examples: For single dimensional variable, it is returned as is discrete of (0,2,4) -> discrete of (0,2,4) For multi dimensional variable, a list of variables is returned, each representing a single dimension continuous {0<=x<=1, 2<=y<=3} -> continuous {0<=x<=1}, continuous {2<=y<=3}
[ "Builds", "a", "list", "of", "single", "dimensional", "variables", "representing", "current", "variable", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/variables.py#L16-L36
242,025
SheffieldML/GPyOpt
GPyOpt/core/task/variables.py
ContinuousVariable.round
def round(self, value_array): """ If value falls within bounds, just return it otherwise return min or max, whichever is closer to the value Assumes an 1d array with a single element as an input. """ min_value = self.domain[0] max_value = self.domain[1] rounded_value = value_array[0] if rounded_value < min_value: rounded_value = min_value elif rounded_value > max_value: rounded_value = max_value return [rounded_value]
python
def round(self, value_array): min_value = self.domain[0] max_value = self.domain[1] rounded_value = value_array[0] if rounded_value < min_value: rounded_value = min_value elif rounded_value > max_value: rounded_value = max_value return [rounded_value]
[ "def", "round", "(", "self", ",", "value_array", ")", ":", "min_value", "=", "self", ".", "domain", "[", "0", "]", "max_value", "=", "self", ".", "domain", "[", "1", "]", "rounded_value", "=", "value_array", "[", "0", "]", "if", "rounded_value", "<", ...
If value falls within bounds, just return it otherwise return min or max, whichever is closer to the value Assumes an 1d array with a single element as an input.
[ "If", "value", "falls", "within", "bounds", "just", "return", "it", "otherwise", "return", "min", "or", "max", "whichever", "is", "closer", "to", "the", "value", "Assumes", "an", "1d", "array", "with", "a", "single", "element", "as", "an", "input", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/variables.py#L100-L116
242,026
SheffieldML/GPyOpt
GPyOpt/core/task/variables.py
BanditVariable.round
def round(self, value_array): """ Rounds a bandit variable by selecting the closest point in the domain Closest here is defined by euclidian distance Assumes an 1d array of the same length as the single variable value """ distances = np.linalg.norm(np.array(self.domain) - value_array, axis=1) idx = np.argmin(distances) return [self.domain[idx]]
python
def round(self, value_array): distances = np.linalg.norm(np.array(self.domain) - value_array, axis=1) idx = np.argmin(distances) return [self.domain[idx]]
[ "def", "round", "(", "self", ",", "value_array", ")", ":", "distances", "=", "np", ".", "linalg", ".", "norm", "(", "np", ".", "array", "(", "self", ".", "domain", ")", "-", "value_array", ",", "axis", "=", "1", ")", "idx", "=", "np", ".", "argmi...
Rounds a bandit variable by selecting the closest point in the domain Closest here is defined by euclidian distance Assumes an 1d array of the same length as the single variable value
[ "Rounds", "a", "bandit", "variable", "by", "selecting", "the", "closest", "point", "in", "the", "domain", "Closest", "here", "is", "defined", "by", "euclidian", "distance", "Assumes", "an", "1d", "array", "of", "the", "same", "length", "as", "the", "single",...
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/variables.py#L151-L159
242,027
SheffieldML/GPyOpt
GPyOpt/core/task/variables.py
DiscreteVariable.round
def round(self, value_array): """ Rounds a discrete variable by selecting the closest point in the domain Assumes an 1d array with a single element as an input. """ value = value_array[0] rounded_value = self.domain[0] for domain_value in self.domain: if np.abs(domain_value - value) < np.abs(rounded_value - value): rounded_value = domain_value return [rounded_value]
python
def round(self, value_array): value = value_array[0] rounded_value = self.domain[0] for domain_value in self.domain: if np.abs(domain_value - value) < np.abs(rounded_value - value): rounded_value = domain_value return [rounded_value]
[ "def", "round", "(", "self", ",", "value_array", ")", ":", "value", "=", "value_array", "[", "0", "]", "rounded_value", "=", "self", ".", "domain", "[", "0", "]", "for", "domain_value", "in", "self", ".", "domain", ":", "if", "np", ".", "abs", "(", ...
Rounds a discrete variable by selecting the closest point in the domain Assumes an 1d array with a single element as an input.
[ "Rounds", "a", "discrete", "variable", "by", "selecting", "the", "closest", "point", "in", "the", "domain", "Assumes", "an", "1d", "array", "with", "a", "single", "element", "as", "an", "input", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/variables.py#L175-L187
242,028
SheffieldML/GPyOpt
GPyOpt/optimization/acquisition_optimizer.py
AcquisitionOptimizer.optimize
def optimize(self, f=None, df=None, f_df=None, duplicate_manager=None): """ Optimizes the input function. :param f: function to optimize. :param df: gradient of the function to optimize. :param f_df: returns both the function to optimize and its gradient. """ self.f = f self.df = df self.f_df = f_df ## --- Update the optimizer, in case context has beee passed. self.optimizer = choose_optimizer(self.optimizer_name, self.context_manager.noncontext_bounds) ## --- Selecting the anchor points and removing duplicates if self.type_anchor_points_logic == max_objective_anchor_points_logic: anchor_points_generator = ObjectiveAnchorPointsGenerator(self.space, random_design_type, f) elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic: anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(self.space, sobol_design_type, self.model) ## -- Select the anchor points (with context) anchor_points = anchor_points_generator.get(duplicate_manager=duplicate_manager, context_manager=self.context_manager) ## --- Applying local optimizers at the anchor points and update bounds of the optimizer (according to the context) optimized_points = [apply_optimizer(self.optimizer, a, f=f, df=None, f_df=f_df, duplicate_manager=duplicate_manager, context_manager=self.context_manager, space = self.space) for a in anchor_points] x_min, fx_min = min(optimized_points, key=lambda t:t[1]) #x_min, fx_min = min([apply_optimizer(self.optimizer, a, f=f, df=None, f_df=f_df, duplicate_manager=duplicate_manager, context_manager=self.context_manager, space = self.space) for a in anchor_points], key=lambda t:t[1]) return x_min, fx_min
python
def optimize(self, f=None, df=None, f_df=None, duplicate_manager=None): self.f = f self.df = df self.f_df = f_df ## --- Update the optimizer, in case context has beee passed. self.optimizer = choose_optimizer(self.optimizer_name, self.context_manager.noncontext_bounds) ## --- Selecting the anchor points and removing duplicates if self.type_anchor_points_logic == max_objective_anchor_points_logic: anchor_points_generator = ObjectiveAnchorPointsGenerator(self.space, random_design_type, f) elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic: anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(self.space, sobol_design_type, self.model) ## -- Select the anchor points (with context) anchor_points = anchor_points_generator.get(duplicate_manager=duplicate_manager, context_manager=self.context_manager) ## --- Applying local optimizers at the anchor points and update bounds of the optimizer (according to the context) optimized_points = [apply_optimizer(self.optimizer, a, f=f, df=None, f_df=f_df, duplicate_manager=duplicate_manager, context_manager=self.context_manager, space = self.space) for a in anchor_points] x_min, fx_min = min(optimized_points, key=lambda t:t[1]) #x_min, fx_min = min([apply_optimizer(self.optimizer, a, f=f, df=None, f_df=f_df, duplicate_manager=duplicate_manager, context_manager=self.context_manager, space = self.space) for a in anchor_points], key=lambda t:t[1]) return x_min, fx_min
[ "def", "optimize", "(", "self", ",", "f", "=", "None", ",", "df", "=", "None", ",", "f_df", "=", "None", ",", "duplicate_manager", "=", "None", ")", ":", "self", ".", "f", "=", "f", "self", ".", "df", "=", "df", "self", ".", "f_df", "=", "f_df"...
Optimizes the input function. :param f: function to optimize. :param df: gradient of the function to optimize. :param f_df: returns both the function to optimize and its gradient.
[ "Optimizes", "the", "input", "function", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/optimization/acquisition_optimizer.py#L46-L77
242,029
SheffieldML/GPyOpt
GPyOpt/core/task/objective.py
SingleObjective.evaluate
def evaluate(self, x): """ Performs the evaluation of the objective at x. """ if self.n_procs == 1: f_evals, cost_evals = self._eval_func(x) else: try: f_evals, cost_evals = self._syncronous_batch_evaluation(x) except: if not hasattr(self, 'parallel_error'): print('Error in parallel computation. Fall back to single process!') else: self.parallel_error = True f_evals, cost_evals = self._eval_func(x) return f_evals, cost_evals
python
def evaluate(self, x): if self.n_procs == 1: f_evals, cost_evals = self._eval_func(x) else: try: f_evals, cost_evals = self._syncronous_batch_evaluation(x) except: if not hasattr(self, 'parallel_error'): print('Error in parallel computation. Fall back to single process!') else: self.parallel_error = True f_evals, cost_evals = self._eval_func(x) return f_evals, cost_evals
[ "def", "evaluate", "(", "self", ",", "x", ")", ":", "if", "self", ".", "n_procs", "==", "1", ":", "f_evals", ",", "cost_evals", "=", "self", ".", "_eval_func", "(", "x", ")", "else", ":", "try", ":", "f_evals", ",", "cost_evals", "=", "self", ".", ...
Performs the evaluation of the objective at x.
[ "Performs", "the", "evaluation", "of", "the", "objective", "at", "x", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/objective.py#L44-L61
242,030
SheffieldML/GPyOpt
GPyOpt/core/task/objective.py
SingleObjective._syncronous_batch_evaluation
def _syncronous_batch_evaluation(self,x): """ Evaluates the function a x, where x can be a single location or a batch. The evaluation is performed in parallel according to the number of accessible cores. """ from multiprocessing import Process, Pipe # --- parallel evaluation of the function divided_samples = [x[i::self.n_procs] for i in range(self.n_procs)] pipe = [Pipe() for i in range(self.n_procs)] proc = [Process(target=spawn(self._eval_func),args=(c,k)) for k,(p,c) in zip(divided_samples,pipe)] [p.start() for p in proc] [p.join() for p in proc] # --- time of evaluation is set to constant (=1). This is one of the hypothesis of synchronous batch methods. f_evals = np.zeros((x.shape[0],1)) cost_evals = np.ones((x.shape[0],1)) i = 0 for (p,c) in pipe: f_evals[i::self.n_procs] = p.recv()[0] # throw away costs i += 1 return f_evals, cost_evals
python
def _syncronous_batch_evaluation(self,x): from multiprocessing import Process, Pipe # --- parallel evaluation of the function divided_samples = [x[i::self.n_procs] for i in range(self.n_procs)] pipe = [Pipe() for i in range(self.n_procs)] proc = [Process(target=spawn(self._eval_func),args=(c,k)) for k,(p,c) in zip(divided_samples,pipe)] [p.start() for p in proc] [p.join() for p in proc] # --- time of evaluation is set to constant (=1). This is one of the hypothesis of synchronous batch methods. f_evals = np.zeros((x.shape[0],1)) cost_evals = np.ones((x.shape[0],1)) i = 0 for (p,c) in pipe: f_evals[i::self.n_procs] = p.recv()[0] # throw away costs i += 1 return f_evals, cost_evals
[ "def", "_syncronous_batch_evaluation", "(", "self", ",", "x", ")", ":", "from", "multiprocessing", "import", "Process", ",", "Pipe", "# --- parallel evaluation of the function", "divided_samples", "=", "[", "x", "[", "i", ":", ":", "self", ".", "n_procs", "]", "...
Evaluates the function a x, where x can be a single location or a batch. The evaluation is performed in parallel according to the number of accessible cores.
[ "Evaluates", "the", "function", "a", "x", "where", "x", "can", "be", "a", "single", "location", "or", "a", "batch", ".", "The", "evaluation", "is", "performed", "in", "parallel", "according", "to", "the", "number", "of", "accessible", "cores", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/objective.py#L80-L101
242,031
SheffieldML/GPyOpt
GPyOpt/core/evaluators/sequential.py
Sequential.compute_batch
def compute_batch(self, duplicate_manager=None,context_manager=None): """ Selects the new location to evaluate the objective. """ x, _ = self.acquisition.optimize(duplicate_manager=duplicate_manager) return x
python
def compute_batch(self, duplicate_manager=None,context_manager=None): x, _ = self.acquisition.optimize(duplicate_manager=duplicate_manager) return x
[ "def", "compute_batch", "(", "self", ",", "duplicate_manager", "=", "None", ",", "context_manager", "=", "None", ")", ":", "x", ",", "_", "=", "self", ".", "acquisition", ".", "optimize", "(", "duplicate_manager", "=", "duplicate_manager", ")", "return", "x"...
Selects the new location to evaluate the objective.
[ "Selects", "the", "new", "location", "to", "evaluate", "the", "objective", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/evaluators/sequential.py#L18-L23
242,032
SheffieldML/GPyOpt
GPyOpt/acquisitions/LCB_mcmc.py
AcquisitionLCB_MCMC._compute_acq
def _compute_acq(self,x): """ Integrated GP-Lower Confidence Bound """ means, stds = self.model.predict(x) f_acqu = 0 for m,s in zip(means, stds): f_acqu += -m + self.exploration_weight * s return f_acqu/(len(means))
python
def _compute_acq(self,x): means, stds = self.model.predict(x) f_acqu = 0 for m,s in zip(means, stds): f_acqu += -m + self.exploration_weight * s return f_acqu/(len(means))
[ "def", "_compute_acq", "(", "self", ",", "x", ")", ":", "means", ",", "stds", "=", "self", ".", "model", ".", "predict", "(", "x", ")", "f_acqu", "=", "0", "for", "m", ",", "s", "in", "zip", "(", "means", ",", "stds", ")", ":", "f_acqu", "+=", ...
Integrated GP-Lower Confidence Bound
[ "Integrated", "GP", "-", "Lower", "Confidence", "Bound" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/LCB_mcmc.py#L26-L34
242,033
SheffieldML/GPyOpt
GPyOpt/acquisitions/LCB_mcmc.py
AcquisitionLCB_MCMC._compute_acq_withGradients
def _compute_acq_withGradients(self, x): """ Integrated GP-Lower Confidence Bound and its derivative """ means, stds, dmdxs, dsdxs = self.model.predict_withGradients(x) f_acqu = None df_acqu = None for m, s, dmdx, dsdx in zip(means, stds, dmdxs, dsdxs): f = -m + self.exploration_weight * s df = -dmdx + self.exploration_weight * dsdx if f_acqu is None: f_acqu = f df_acqu = df else: f_acqu += f df_acqu += df return f_acqu/(len(means)), df_acqu/(len(means))
python
def _compute_acq_withGradients(self, x): means, stds, dmdxs, dsdxs = self.model.predict_withGradients(x) f_acqu = None df_acqu = None for m, s, dmdx, dsdx in zip(means, stds, dmdxs, dsdxs): f = -m + self.exploration_weight * s df = -dmdx + self.exploration_weight * dsdx if f_acqu is None: f_acqu = f df_acqu = df else: f_acqu += f df_acqu += df return f_acqu/(len(means)), df_acqu/(len(means))
[ "def", "_compute_acq_withGradients", "(", "self", ",", "x", ")", ":", "means", ",", "stds", ",", "dmdxs", ",", "dsdxs", "=", "self", ".", "model", ".", "predict_withGradients", "(", "x", ")", "f_acqu", "=", "None", "df_acqu", "=", "None", "for", "m", "...
Integrated GP-Lower Confidence Bound and its derivative
[ "Integrated", "GP", "-", "Lower", "Confidence", "Bound", "and", "its", "derivative" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/LCB_mcmc.py#L36-L52
242,034
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space._expand_config_space
def _expand_config_space(self): """ Expands the config input space into a list of diccionaries, one for each variable_dic in which the dimensionality is always one. Example: It would transform config_space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1}, {'name': 'var_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':2}, into config_expande_space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1}, {'name': 'var_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':1}, {'name': 'var_2_1', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':1}] """ self.config_space_expanded = [] for variable in self.config_space: variable_dic = variable.copy() if 'dimensionality' in variable_dic.keys(): dimensionality = variable_dic['dimensionality'] variable_dic['dimensionality'] = 1 variables_set = [variable_dic.copy() for d in range(dimensionality)] k=1 for variable in variables_set: variable['name'] = variable['name'] + '_'+str(k) k+=1 self.config_space_expanded += variables_set else: self.config_space_expanded += [variable_dic]
python
def _expand_config_space(self): self.config_space_expanded = [] for variable in self.config_space: variable_dic = variable.copy() if 'dimensionality' in variable_dic.keys(): dimensionality = variable_dic['dimensionality'] variable_dic['dimensionality'] = 1 variables_set = [variable_dic.copy() for d in range(dimensionality)] k=1 for variable in variables_set: variable['name'] = variable['name'] + '_'+str(k) k+=1 self.config_space_expanded += variables_set else: self.config_space_expanded += [variable_dic]
[ "def", "_expand_config_space", "(", "self", ")", ":", "self", ".", "config_space_expanded", "=", "[", "]", "for", "variable", "in", "self", ".", "config_space", ":", "variable_dic", "=", "variable", ".", "copy", "(", ")", "if", "'dimensionality'", "in", "var...
Expands the config input space into a list of diccionaries, one for each variable_dic in which the dimensionality is always one. Example: It would transform config_space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1}, {'name': 'var_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':2}, into config_expande_space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1}, {'name': 'var_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':1}, {'name': 'var_2_1', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':1}]
[ "Expands", "the", "config", "input", "space", "into", "a", "list", "of", "diccionaries", "one", "for", "each", "variable_dic", "in", "which", "the", "dimensionality", "is", "always", "one", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L101-L131
242,035
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space._create_variables_dic
def _create_variables_dic(self): """ Returns the variable by passing its name """ self.name_to_variable = {} for variable in self.space_expanded: self.name_to_variable[variable.name] = variable
python
def _create_variables_dic(self): self.name_to_variable = {} for variable in self.space_expanded: self.name_to_variable[variable.name] = variable
[ "def", "_create_variables_dic", "(", "self", ")", ":", "self", ".", "name_to_variable", "=", "{", "}", "for", "variable", "in", "self", ".", "space_expanded", ":", "self", ".", "name_to_variable", "[", "variable", ".", "name", "]", "=", "variable" ]
Returns the variable by passing its name
[ "Returns", "the", "variable", "by", "passing", "its", "name" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L162-L168
242,036
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space._translate_space
def _translate_space(self, space): """ Translates a list of dictionaries into internal list of variables """ self.space = [] self.dimensionality = 0 self.has_types = d = {t: False for t in self.supported_types} for i, d in enumerate(space): descriptor = deepcopy(d) descriptor['name'] = descriptor.get('name', 'var_' + str(i)) descriptor['type'] = descriptor.get('type', 'continuous') if 'domain' not in descriptor: raise InvalidConfigError('Domain attribute is missing for variable ' + descriptor['name']) variable = create_variable(descriptor) self.space.append(variable) self.dimensionality += variable.dimensionality self.has_types[variable.type] = True # Check if there are any bandit and non-bandit variables together in the space if any(v.is_bandit() for v in self.space) and any(not v.is_bandit() for v in self.space): raise InvalidConfigError('Invalid mixed domain configuration. Bandit variables cannot be mixed with other types.')
python
def _translate_space(self, space): self.space = [] self.dimensionality = 0 self.has_types = d = {t: False for t in self.supported_types} for i, d in enumerate(space): descriptor = deepcopy(d) descriptor['name'] = descriptor.get('name', 'var_' + str(i)) descriptor['type'] = descriptor.get('type', 'continuous') if 'domain' not in descriptor: raise InvalidConfigError('Domain attribute is missing for variable ' + descriptor['name']) variable = create_variable(descriptor) self.space.append(variable) self.dimensionality += variable.dimensionality self.has_types[variable.type] = True # Check if there are any bandit and non-bandit variables together in the space if any(v.is_bandit() for v in self.space) and any(not v.is_bandit() for v in self.space): raise InvalidConfigError('Invalid mixed domain configuration. Bandit variables cannot be mixed with other types.')
[ "def", "_translate_space", "(", "self", ",", "space", ")", ":", "self", ".", "space", "=", "[", "]", "self", ".", "dimensionality", "=", "0", "self", ".", "has_types", "=", "d", "=", "{", "t", ":", "False", "for", "t", "in", "self", ".", "supported...
Translates a list of dictionaries into internal list of variables
[ "Translates", "a", "list", "of", "dictionaries", "into", "internal", "list", "of", "variables" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L170-L191
242,037
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space._expand_space
def _expand_space(self): """ Creates an internal list where the variables with dimensionality larger than one are expanded. This list is the one that is used internally to do the optimization. """ ## --- Expand the config space self._expand_config_space() ## --- Expand the space self.space_expanded = [] for variable in self.space: self.space_expanded += variable.expand()
python
def _expand_space(self): ## --- Expand the config space self._expand_config_space() ## --- Expand the space self.space_expanded = [] for variable in self.space: self.space_expanded += variable.expand()
[ "def", "_expand_space", "(", "self", ")", ":", "## --- Expand the config space", "self", ".", "_expand_config_space", "(", ")", "## --- Expand the space", "self", ".", "space_expanded", "=", "[", "]", "for", "variable", "in", "self", ".", "space", ":", "self", "...
Creates an internal list where the variables with dimensionality larger than one are expanded. This list is the one that is used internally to do the optimization.
[ "Creates", "an", "internal", "list", "where", "the", "variables", "with", "dimensionality", "larger", "than", "one", "are", "expanded", ".", "This", "list", "is", "the", "one", "that", "is", "used", "internally", "to", "do", "the", "optimization", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L193-L205
242,038
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.objective_to_model
def objective_to_model(self, x_objective): ''' This function serves as interface between objective input vectors and model input vectors''' x_model = [] for k in range(self.objective_dimensionality): variable = self.space_expanded[k] new_entry = variable.objective_to_model(x_objective[0,k]) x_model += new_entry return x_model
python
def objective_to_model(self, x_objective): ''' This function serves as interface between objective input vectors and model input vectors''' x_model = [] for k in range(self.objective_dimensionality): variable = self.space_expanded[k] new_entry = variable.objective_to_model(x_objective[0,k]) x_model += new_entry return x_model
[ "def", "objective_to_model", "(", "self", ",", "x_objective", ")", ":", "x_model", "=", "[", "]", "for", "k", "in", "range", "(", "self", ".", "objective_dimensionality", ")", ":", "variable", "=", "self", ".", "space_expanded", "[", "k", "]", "new_entry",...
This function serves as interface between objective input vectors and model input vectors
[ "This", "function", "serves", "as", "interface", "between", "objective", "input", "vectors", "and", "model", "input", "vectors" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L207-L218
242,039
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.model_to_objective
def model_to_objective(self, x_model): ''' This function serves as interface between model input vectors and objective input vectors ''' idx_model = 0 x_objective = [] for idx_obj in range(self.objective_dimensionality): variable = self.space_expanded[idx_obj] new_entry = variable.model_to_objective(x_model, idx_model) x_objective += new_entry idx_model += variable.dimensionality_in_model return x_objective
python
def model_to_objective(self, x_model): ''' This function serves as interface between model input vectors and objective input vectors ''' idx_model = 0 x_objective = [] for idx_obj in range(self.objective_dimensionality): variable = self.space_expanded[idx_obj] new_entry = variable.model_to_objective(x_model, idx_model) x_objective += new_entry idx_model += variable.dimensionality_in_model return x_objective
[ "def", "model_to_objective", "(", "self", ",", "x_model", ")", ":", "idx_model", "=", "0", "x_objective", "=", "[", "]", "for", "idx_obj", "in", "range", "(", "self", ".", "objective_dimensionality", ")", ":", "variable", "=", "self", ".", "space_expanded", ...
This function serves as interface between model input vectors and objective input vectors
[ "This", "function", "serves", "as", "interface", "between", "model", "input", "vectors", "and", "objective", "input", "vectors" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L238-L251
242,040
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.get_subspace
def get_subspace(self, dims): ''' Extracts subspace from the reference of a list of variables in the inputs of the model. ''' subspace = [] k = 0 for variable in self.space_expanded: if k in dims: subspace.append(variable) k += variable.dimensionality_in_model return subspace
python
def get_subspace(self, dims): ''' Extracts subspace from the reference of a list of variables in the inputs of the model. ''' subspace = [] k = 0 for variable in self.space_expanded: if k in dims: subspace.append(variable) k += variable.dimensionality_in_model return subspace
[ "def", "get_subspace", "(", "self", ",", "dims", ")", ":", "subspace", "=", "[", "]", "k", "=", "0", "for", "variable", "in", "self", ".", "space_expanded", ":", "if", "k", "in", "dims", ":", "subspace", ".", "append", "(", "variable", ")", "k", "+...
Extracts subspace from the reference of a list of variables in the inputs of the model.
[ "Extracts", "subspace", "from", "the", "reference", "of", "a", "list", "of", "variables", "in", "the", "inputs", "of", "the", "model", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L283-L294
242,041
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.indicator_constraints
def indicator_constraints(self,x): """ Returns array of ones and zeros indicating if x is within the constraints """ x = np.atleast_2d(x) I_x = np.ones((x.shape[0],1)) if self.constraints is not None: for d in self.constraints: try: exec('constraint = lambda x:' + d['constraint'], globals()) ind_x = (constraint(x) <= 0) * 1 I_x *= ind_x.reshape(x.shape[0],1) except: print('Fail to compile the constraint: ' + str(d)) raise return I_x
python
def indicator_constraints(self,x): x = np.atleast_2d(x) I_x = np.ones((x.shape[0],1)) if self.constraints is not None: for d in self.constraints: try: exec('constraint = lambda x:' + d['constraint'], globals()) ind_x = (constraint(x) <= 0) * 1 I_x *= ind_x.reshape(x.shape[0],1) except: print('Fail to compile the constraint: ' + str(d)) raise return I_x
[ "def", "indicator_constraints", "(", "self", ",", "x", ")", ":", "x", "=", "np", ".", "atleast_2d", "(", "x", ")", "I_x", "=", "np", ".", "ones", "(", "(", "x", ".", "shape", "[", "0", "]", ",", "1", ")", ")", "if", "self", ".", "constraints", ...
Returns array of ones and zeros indicating if x is within the constraints
[ "Returns", "array", "of", "ones", "and", "zeros", "indicating", "if", "x", "is", "within", "the", "constraints" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L297-L312
242,042
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.input_dim
def input_dim(self): """ Extracts the input dimension of the domain. """ n_cont = len(self.get_continuous_dims()) n_disc = len(self.get_discrete_dims()) return n_cont + n_disc
python
def input_dim(self): n_cont = len(self.get_continuous_dims()) n_disc = len(self.get_discrete_dims()) return n_cont + n_disc
[ "def", "input_dim", "(", "self", ")", ":", "n_cont", "=", "len", "(", "self", ".", "get_continuous_dims", "(", ")", ")", "n_disc", "=", "len", "(", "self", ".", "get_discrete_dims", "(", ")", ")", "return", "n_cont", "+", "n_disc" ]
Extracts the input dimension of the domain.
[ "Extracts", "the", "input", "dimension", "of", "the", "domain", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L314-L320
242,043
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.round_optimum
def round_optimum(self, x): """ Rounds some value x to a feasible value in the design space. x is expected to be a vector or an array with a single row """ x = np.array(x) if not ((x.ndim == 1) or (x.ndim == 2 and x.shape[0] == 1)): raise ValueError("Unexpected dimentionality of x. Got {}, expected (1, N) or (N,)".format(x.ndim)) if x.ndim == 2: x = x[0] x_rounded = [] value_index = 0 for variable in self.space_expanded: var_value = x[value_index : value_index + variable.dimensionality_in_model] var_value_rounded = variable.round(var_value) x_rounded.append(var_value_rounded) value_index += variable.dimensionality_in_model return np.atleast_2d(np.concatenate(x_rounded))
python
def round_optimum(self, x): x = np.array(x) if not ((x.ndim == 1) or (x.ndim == 2 and x.shape[0] == 1)): raise ValueError("Unexpected dimentionality of x. Got {}, expected (1, N) or (N,)".format(x.ndim)) if x.ndim == 2: x = x[0] x_rounded = [] value_index = 0 for variable in self.space_expanded: var_value = x[value_index : value_index + variable.dimensionality_in_model] var_value_rounded = variable.round(var_value) x_rounded.append(var_value_rounded) value_index += variable.dimensionality_in_model return np.atleast_2d(np.concatenate(x_rounded))
[ "def", "round_optimum", "(", "self", ",", "x", ")", ":", "x", "=", "np", ".", "array", "(", "x", ")", "if", "not", "(", "(", "x", ".", "ndim", "==", "1", ")", "or", "(", "x", ".", "ndim", "==", "2", "and", "x", ".", "shape", "[", "0", "]"...
Rounds some value x to a feasible value in the design space. x is expected to be a vector or an array with a single row
[ "Rounds", "some", "value", "x", "to", "a", "feasible", "value", "in", "the", "design", "space", ".", "x", "is", "expected", "to", "be", "a", "vector", "or", "an", "array", "with", "a", "single", "row" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L322-L343
242,044
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.get_continuous_bounds
def get_continuous_bounds(self): """ Extracts the bounds of the continuous variables. """ bounds = [] for d in self.space: if d.type == 'continuous': bounds.extend([d.domain]*d.dimensionality) return bounds
python
def get_continuous_bounds(self): bounds = [] for d in self.space: if d.type == 'continuous': bounds.extend([d.domain]*d.dimensionality) return bounds
[ "def", "get_continuous_bounds", "(", "self", ")", ":", "bounds", "=", "[", "]", "for", "d", "in", "self", ".", "space", ":", "if", "d", ".", "type", "==", "'continuous'", ":", "bounds", ".", "extend", "(", "[", "d", ".", "domain", "]", "*", "d", ...
Extracts the bounds of the continuous variables.
[ "Extracts", "the", "bounds", "of", "the", "continuous", "variables", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L353-L361
242,045
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.get_continuous_dims
def get_continuous_dims(self): """ Returns the dimension of the continuous components of the domain. """ continuous_dims = [] for i in range(self.dimensionality): if self.space_expanded[i].type == 'continuous': continuous_dims += [i] return continuous_dims
python
def get_continuous_dims(self): continuous_dims = [] for i in range(self.dimensionality): if self.space_expanded[i].type == 'continuous': continuous_dims += [i] return continuous_dims
[ "def", "get_continuous_dims", "(", "self", ")", ":", "continuous_dims", "=", "[", "]", "for", "i", "in", "range", "(", "self", ".", "dimensionality", ")", ":", "if", "self", ".", "space_expanded", "[", "i", "]", ".", "type", "==", "'continuous'", ":", ...
Returns the dimension of the continuous components of the domain.
[ "Returns", "the", "dimension", "of", "the", "continuous", "components", "of", "the", "domain", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L364-L372
242,046
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.get_discrete_grid
def get_discrete_grid(self): """ Computes a Numpy array with the grid of points that results after crossing the possible outputs of the discrete variables """ sets_grid = [] for d in self.space: if d.type == 'discrete': sets_grid.extend([d.domain]*d.dimensionality) return np.array(list(itertools.product(*sets_grid)))
python
def get_discrete_grid(self): sets_grid = [] for d in self.space: if d.type == 'discrete': sets_grid.extend([d.domain]*d.dimensionality) return np.array(list(itertools.product(*sets_grid)))
[ "def", "get_discrete_grid", "(", "self", ")", ":", "sets_grid", "=", "[", "]", "for", "d", "in", "self", ".", "space", ":", "if", "d", ".", "type", "==", "'discrete'", ":", "sets_grid", ".", "extend", "(", "[", "d", ".", "domain", "]", "*", "d", ...
Computes a Numpy array with the grid of points that results after crossing the possible outputs of the discrete variables
[ "Computes", "a", "Numpy", "array", "with", "the", "grid", "of", "points", "that", "results", "after", "crossing", "the", "possible", "outputs", "of", "the", "discrete", "variables" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L387-L396
242,047
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.get_discrete_dims
def get_discrete_dims(self): """ Returns the dimension of the discrete components of the domain. """ discrete_dims = [] for i in range(self.dimensionality): if self.space_expanded[i].type == 'discrete': discrete_dims += [i] return discrete_dims
python
def get_discrete_dims(self): discrete_dims = [] for i in range(self.dimensionality): if self.space_expanded[i].type == 'discrete': discrete_dims += [i] return discrete_dims
[ "def", "get_discrete_dims", "(", "self", ")", ":", "discrete_dims", "=", "[", "]", "for", "i", "in", "range", "(", "self", ".", "dimensionality", ")", ":", "if", "self", ".", "space_expanded", "[", "i", "]", ".", "type", "==", "'discrete'", ":", "discr...
Returns the dimension of the discrete components of the domain.
[ "Returns", "the", "dimension", "of", "the", "discrete", "components", "of", "the", "domain", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L399-L407
242,048
SheffieldML/GPyOpt
GPyOpt/core/task/space.py
Design_space.get_bandit
def get_bandit(self): """ Extracts the arms of the bandit if any. """ arms_bandit = [] for d in self.space: if d.type == 'bandit': arms_bandit += tuple(map(tuple, d.domain)) return np.asarray(arms_bandit)
python
def get_bandit(self): arms_bandit = [] for d in self.space: if d.type == 'bandit': arms_bandit += tuple(map(tuple, d.domain)) return np.asarray(arms_bandit)
[ "def", "get_bandit", "(", "self", ")", ":", "arms_bandit", "=", "[", "]", "for", "d", "in", "self", ".", "space", ":", "if", "d", ".", "type", "==", "'bandit'", ":", "arms_bandit", "+=", "tuple", "(", "map", "(", "tuple", ",", "d", ".", "domain", ...
Extracts the arms of the bandit if any.
[ "Extracts", "the", "arms", "of", "the", "bandit", "if", "any", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/task/space.py#L422-L430
242,049
SheffieldML/GPyOpt
GPyOpt/models/rfmodel.py
RFModel.predict
def predict(self, X): """ Predictions with the model. Returns posterior means and standard deviations at X. """ X = np.atleast_2d(X) m = np.empty(shape=(0,1)) s = np.empty(shape=(0,1)) for k in range(X.shape[0]): preds = [] for pred in self.model.estimators_: preds.append(pred.predict(X[k,:])[0]) m = np.vstack((m ,np.array(preds).mean())) s = np.vstack((s ,np.array(preds).std())) return m, s
python
def predict(self, X): X = np.atleast_2d(X) m = np.empty(shape=(0,1)) s = np.empty(shape=(0,1)) for k in range(X.shape[0]): preds = [] for pred in self.model.estimators_: preds.append(pred.predict(X[k,:])[0]) m = np.vstack((m ,np.array(preds).mean())) s = np.vstack((s ,np.array(preds).std())) return m, s
[ "def", "predict", "(", "self", ",", "X", ")", ":", "X", "=", "np", ".", "atleast_2d", "(", "X", ")", "m", "=", "np", ".", "empty", "(", "shape", "=", "(", "0", ",", "1", ")", ")", "s", "=", "np", ".", "empty", "(", "shape", "=", "(", "0",...
Predictions with the model. Returns posterior means and standard deviations at X.
[ "Predictions", "with", "the", "model", ".", "Returns", "posterior", "means", "and", "standard", "deviations", "at", "X", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/rfmodel.py#L79-L93
242,050
SheffieldML/GPyOpt
GPyOpt/acquisitions/MPI_mcmc.py
AcquisitionMPI_MCMC._compute_acq
def _compute_acq(self,x): """ Integrated Expected Improvement """ means, stds = self.model.predict(x) fmins = self.model.get_fmin() f_acqu = 0 for m,s,fmin in zip(means, stds, fmins): _, Phi, _ = get_quantiles(self.jitter, fmin, m, s) f_acqu += Phi return f_acqu/len(means)
python
def _compute_acq(self,x): means, stds = self.model.predict(x) fmins = self.model.get_fmin() f_acqu = 0 for m,s,fmin in zip(means, stds, fmins): _, Phi, _ = get_quantiles(self.jitter, fmin, m, s) f_acqu += Phi return f_acqu/len(means)
[ "def", "_compute_acq", "(", "self", ",", "x", ")", ":", "means", ",", "stds", "=", "self", ".", "model", ".", "predict", "(", "x", ")", "fmins", "=", "self", ".", "model", ".", "get_fmin", "(", ")", "f_acqu", "=", "0", "for", "m", ",", "s", ","...
Integrated Expected Improvement
[ "Integrated", "Expected", "Improvement" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/MPI_mcmc.py#L29-L39
242,051
SheffieldML/GPyOpt
GPyOpt/acquisitions/MPI_mcmc.py
AcquisitionMPI_MCMC._compute_acq_withGradients
def _compute_acq_withGradients(self, x): """ Integrated Expected Improvement and its derivative """ means, stds, dmdxs, dsdxs = self.model.predict_withGradients(x) fmins = self.model.get_fmin() f_acqu = None df_acqu = None for m, s, fmin, dmdx, dsdx in zip(means, stds, fmins, dmdxs, dsdxs): phi, Phi, u = get_quantiles(self.jitter, fmin, m, s) f = Phi df = -(phi/s)* (dmdx + dsdx * u) if f_acqu is None: f_acqu = f df_acqu = df else: f_acqu += f df_acqu += df return f_acqu/(len(means)), df_acqu/(len(means))
python
def _compute_acq_withGradients(self, x): means, stds, dmdxs, dsdxs = self.model.predict_withGradients(x) fmins = self.model.get_fmin() f_acqu = None df_acqu = None for m, s, fmin, dmdx, dsdx in zip(means, stds, fmins, dmdxs, dsdxs): phi, Phi, u = get_quantiles(self.jitter, fmin, m, s) f = Phi df = -(phi/s)* (dmdx + dsdx * u) if f_acqu is None: f_acqu = f df_acqu = df else: f_acqu += f df_acqu += df return f_acqu/(len(means)), df_acqu/(len(means))
[ "def", "_compute_acq_withGradients", "(", "self", ",", "x", ")", ":", "means", ",", "stds", ",", "dmdxs", ",", "dsdxs", "=", "self", ".", "model", ".", "predict_withGradients", "(", "x", ")", "fmins", "=", "self", ".", "model", ".", "get_fmin", "(", ")...
Integrated Expected Improvement and its derivative
[ "Integrated", "Expected", "Improvement", "and", "its", "derivative" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/MPI_mcmc.py#L41-L59
242,052
SheffieldML/GPyOpt
GPyOpt/plotting/plots_bo.py
plot_convergence
def plot_convergence(Xdata,best_Y, filename = None): ''' Plots to evaluate the convergence of standard Bayesian optimization algorithms ''' n = Xdata.shape[0] aux = (Xdata[1:n,:]-Xdata[0:n-1,:])**2 distances = np.sqrt(aux.sum(axis=1)) ## Distances between consecutive x's plt.figure(figsize=(10,5)) plt.subplot(1, 2, 1) plt.plot(list(range(n-1)), distances, '-ro') plt.xlabel('Iteration') plt.ylabel('d(x[n], x[n-1])') plt.title('Distance between consecutive x\'s') grid(True) # Estimated m(x) at the proposed sampling points plt.subplot(1, 2, 2) plt.plot(list(range(n)),best_Y,'-o') plt.title('Value of the best selected sample') plt.xlabel('Iteration') plt.ylabel('Best y') grid(True) if filename!=None: savefig(filename) else: plt.show()
python
def plot_convergence(Xdata,best_Y, filename = None): ''' Plots to evaluate the convergence of standard Bayesian optimization algorithms ''' n = Xdata.shape[0] aux = (Xdata[1:n,:]-Xdata[0:n-1,:])**2 distances = np.sqrt(aux.sum(axis=1)) ## Distances between consecutive x's plt.figure(figsize=(10,5)) plt.subplot(1, 2, 1) plt.plot(list(range(n-1)), distances, '-ro') plt.xlabel('Iteration') plt.ylabel('d(x[n], x[n-1])') plt.title('Distance between consecutive x\'s') grid(True) # Estimated m(x) at the proposed sampling points plt.subplot(1, 2, 2) plt.plot(list(range(n)),best_Y,'-o') plt.title('Value of the best selected sample') plt.xlabel('Iteration') plt.ylabel('Best y') grid(True) if filename!=None: savefig(filename) else: plt.show()
[ "def", "plot_convergence", "(", "Xdata", ",", "best_Y", ",", "filename", "=", "None", ")", ":", "n", "=", "Xdata", ".", "shape", "[", "0", "]", "aux", "=", "(", "Xdata", "[", "1", ":", "n", ",", ":", "]", "-", "Xdata", "[", "0", ":", "n", "-"...
Plots to evaluate the convergence of standard Bayesian optimization algorithms
[ "Plots", "to", "evaluate", "the", "convergence", "of", "standard", "Bayesian", "optimization", "algorithms" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/plotting/plots_bo.py#L122-L150
242,053
SheffieldML/GPyOpt
GPyOpt/core/evaluators/batch_local_penalization.py
LocalPenalization.compute_batch
def compute_batch(self, duplicate_manager=None, context_manager=None): """ Computes the elements of the batch sequentially by penalizing the acquisition. """ from ...acquisitions import AcquisitionLP assert isinstance(self.acquisition, AcquisitionLP) self.acquisition.update_batches(None,None,None) # --- GET first element in the batch X_batch = self.acquisition.optimize()[0] k=1 if self.batch_size >1: # ---------- Approximate the constants of the the method L = estimate_L(self.acquisition.model.model,self.acquisition.space.get_bounds()) Min = self.acquisition.model.model.Y.min() # --- GET the remaining elements while k<self.batch_size: self.acquisition.update_batches(X_batch,L,Min) new_sample = self.acquisition.optimize()[0] X_batch = np.vstack((X_batch,new_sample)) k +=1 # --- Back to the non-penalized acquisition self.acquisition.update_batches(None,None,None) return X_batch
python
def compute_batch(self, duplicate_manager=None, context_manager=None): from ...acquisitions import AcquisitionLP assert isinstance(self.acquisition, AcquisitionLP) self.acquisition.update_batches(None,None,None) # --- GET first element in the batch X_batch = self.acquisition.optimize()[0] k=1 if self.batch_size >1: # ---------- Approximate the constants of the the method L = estimate_L(self.acquisition.model.model,self.acquisition.space.get_bounds()) Min = self.acquisition.model.model.Y.min() # --- GET the remaining elements while k<self.batch_size: self.acquisition.update_batches(X_batch,L,Min) new_sample = self.acquisition.optimize()[0] X_batch = np.vstack((X_batch,new_sample)) k +=1 # --- Back to the non-penalized acquisition self.acquisition.update_batches(None,None,None) return X_batch
[ "def", "compute_batch", "(", "self", ",", "duplicate_manager", "=", "None", ",", "context_manager", "=", "None", ")", ":", "from", ".", ".", ".", "acquisitions", "import", "AcquisitionLP", "assert", "isinstance", "(", "self", ".", "acquisition", ",", "Acquisit...
Computes the elements of the batch sequentially by penalizing the acquisition.
[ "Computes", "the", "elements", "of", "the", "batch", "sequentially", "by", "penalizing", "the", "acquisition", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/evaluators/batch_local_penalization.py#L22-L49
242,054
SheffieldML/GPyOpt
manual/notebooks_check.py
check_notebooks_for_errors
def check_notebooks_for_errors(notebooks_directory): ''' Evaluates all notebooks in given directory and prints errors, if any ''' print("Checking notebooks in directory {} for errors".format(notebooks_directory)) failed_notebooks_count = 0 for file in os.listdir(notebooks_directory): if file.endswith(".ipynb"): print("Checking notebook " + file) full_file_path = os.path.join(notebooks_directory, file) output, errors = run_notebook(full_file_path) if errors is not None and len(errors) > 0: failed_notebooks_count += 1 print("Errors in notebook " + file) print(errors) if failed_notebooks_count == 0: print("No errors found in notebooks under " + notebooks_directory)
python
def check_notebooks_for_errors(notebooks_directory): ''' Evaluates all notebooks in given directory and prints errors, if any ''' print("Checking notebooks in directory {} for errors".format(notebooks_directory)) failed_notebooks_count = 0 for file in os.listdir(notebooks_directory): if file.endswith(".ipynb"): print("Checking notebook " + file) full_file_path = os.path.join(notebooks_directory, file) output, errors = run_notebook(full_file_path) if errors is not None and len(errors) > 0: failed_notebooks_count += 1 print("Errors in notebook " + file) print(errors) if failed_notebooks_count == 0: print("No errors found in notebooks under " + notebooks_directory)
[ "def", "check_notebooks_for_errors", "(", "notebooks_directory", ")", ":", "print", "(", "\"Checking notebooks in directory {} for errors\"", ".", "format", "(", "notebooks_directory", ")", ")", "failed_notebooks_count", "=", "0", "for", "file", "in", "os", ".", "listdi...
Evaluates all notebooks in given directory and prints errors, if any
[ "Evaluates", "all", "notebooks", "in", "given", "directory", "and", "prints", "errors", "if", "any" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/manual/notebooks_check.py#L8-L24
242,055
SheffieldML/GPyOpt
GPyOpt/models/gpmodel.py
GPModel.predict
def predict(self, X, with_noise=True): """ Predictions with the model. Returns posterior means and standard deviations at X. Note that this is different in GPy where the variances are given. Parameters: X (np.ndarray) - points to run the prediction for. with_noise (bool) - whether to add noise to the prediction. Default is True. """ m, v = self._predict(X, False, with_noise) # We can take the square root because v is just a diagonal matrix of variances return m, np.sqrt(v)
python
def predict(self, X, with_noise=True): m, v = self._predict(X, False, with_noise) # We can take the square root because v is just a diagonal matrix of variances return m, np.sqrt(v)
[ "def", "predict", "(", "self", ",", "X", ",", "with_noise", "=", "True", ")", ":", "m", ",", "v", "=", "self", ".", "_predict", "(", "X", ",", "False", ",", "with_noise", ")", "# We can take the square root because v is just a diagonal matrix of variances", "ret...
Predictions with the model. Returns posterior means and standard deviations at X. Note that this is different in GPy where the variances are given. Parameters: X (np.ndarray) - points to run the prediction for. with_noise (bool) - whether to add noise to the prediction. Default is True.
[ "Predictions", "with", "the", "model", ".", "Returns", "posterior", "means", "and", "standard", "deviations", "at", "X", ".", "Note", "that", "this", "is", "different", "in", "GPy", "where", "the", "variances", "are", "given", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/gpmodel.py#L100-L110
242,056
SheffieldML/GPyOpt
GPyOpt/models/gpmodel.py
GPModel.predict_covariance
def predict_covariance(self, X, with_noise=True): """ Predicts the covariance matric for points in X. Parameters: X (np.ndarray) - points to run the prediction for. with_noise (bool) - whether to add noise to the prediction. Default is True. """ _, v = self._predict(X, True, with_noise) return v
python
def predict_covariance(self, X, with_noise=True): _, v = self._predict(X, True, with_noise) return v
[ "def", "predict_covariance", "(", "self", ",", "X", ",", "with_noise", "=", "True", ")", ":", "_", ",", "v", "=", "self", ".", "_predict", "(", "X", ",", "True", ",", "with_noise", ")", "return", "v" ]
Predicts the covariance matric for points in X. Parameters: X (np.ndarray) - points to run the prediction for. with_noise (bool) - whether to add noise to the prediction. Default is True.
[ "Predicts", "the", "covariance", "matric", "for", "points", "in", "X", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/gpmodel.py#L112-L121
242,057
SheffieldML/GPyOpt
GPyOpt/models/gpmodel.py
GPModel.predict_withGradients
def predict_withGradients(self, X): """ Returns the mean, standard deviation, mean gradient and standard deviation gradient at X. """ if X.ndim==1: X = X[None,:] m, v = self.model.predict(X) v = np.clip(v, 1e-10, np.inf) dmdx, dvdx = self.model.predictive_gradients(X) dmdx = dmdx[:,:,0] dsdx = dvdx / (2*np.sqrt(v)) return m, np.sqrt(v), dmdx, dsdx
python
def predict_withGradients(self, X): if X.ndim==1: X = X[None,:] m, v = self.model.predict(X) v = np.clip(v, 1e-10, np.inf) dmdx, dvdx = self.model.predictive_gradients(X) dmdx = dmdx[:,:,0] dsdx = dvdx / (2*np.sqrt(v)) return m, np.sqrt(v), dmdx, dsdx
[ "def", "predict_withGradients", "(", "self", ",", "X", ")", ":", "if", "X", ".", "ndim", "==", "1", ":", "X", "=", "X", "[", "None", ",", ":", "]", "m", ",", "v", "=", "self", ".", "model", ".", "predict", "(", "X", ")", "v", "=", "np", "."...
Returns the mean, standard deviation, mean gradient and standard deviation gradient at X.
[ "Returns", "the", "mean", "standard", "deviation", "mean", "gradient", "and", "standard", "deviation", "gradient", "at", "X", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/gpmodel.py#L129-L140
242,058
SheffieldML/GPyOpt
GPyOpt/models/gpmodel.py
GPModel_MCMC.predict
def predict(self, X): """ Predictions with the model for all the MCMC samples. Returns posterior means and standard deviations at X. Note that this is different in GPy where the variances are given. """ if X.ndim==1: X = X[None,:] ps = self.model.param_array.copy() means = [] stds = [] for s in self.hmc_samples: if self.model._fixes_ is None: self.model[:] = s else: self.model[self.model._fixes_] = s self.model._trigger_params_changed() m, v = self.model.predict(X) means.append(m) stds.append(np.sqrt(np.clip(v, 1e-10, np.inf))) self.model.param_array[:] = ps self.model._trigger_params_changed() return means, stds
python
def predict(self, X): if X.ndim==1: X = X[None,:] ps = self.model.param_array.copy() means = [] stds = [] for s in self.hmc_samples: if self.model._fixes_ is None: self.model[:] = s else: self.model[self.model._fixes_] = s self.model._trigger_params_changed() m, v = self.model.predict(X) means.append(m) stds.append(np.sqrt(np.clip(v, 1e-10, np.inf))) self.model.param_array[:] = ps self.model._trigger_params_changed() return means, stds
[ "def", "predict", "(", "self", ",", "X", ")", ":", "if", "X", ".", "ndim", "==", "1", ":", "X", "=", "X", "[", "None", ",", ":", "]", "ps", "=", "self", ".", "model", ".", "param_array", ".", "copy", "(", ")", "means", "=", "[", "]", "stds"...
Predictions with the model for all the MCMC samples. Returns posterior means and standard deviations at X. Note that this is different in GPy where the variances are given.
[ "Predictions", "with", "the", "model", "for", "all", "the", "MCMC", "samples", ".", "Returns", "posterior", "means", "and", "standard", "deviations", "at", "X", ".", "Note", "that", "this", "is", "different", "in", "GPy", "where", "the", "variances", "are", ...
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/gpmodel.py#L255-L275
242,059
SheffieldML/GPyOpt
GPyOpt/models/gpmodel.py
GPModel_MCMC.get_fmin
def get_fmin(self): """ Returns the location where the posterior mean is takes its minimal value. """ ps = self.model.param_array.copy() fmins = [] for s in self.hmc_samples: if self.model._fixes_ is None: self.model[:] = s else: self.model[self.model._fixes_] = s self.model._trigger_params_changed() fmins.append(self.model.predict(self.model.X)[0].min()) self.model.param_array[:] = ps self.model._trigger_params_changed() return fmins
python
def get_fmin(self): ps = self.model.param_array.copy() fmins = [] for s in self.hmc_samples: if self.model._fixes_ is None: self.model[:] = s else: self.model[self.model._fixes_] = s self.model._trigger_params_changed() fmins.append(self.model.predict(self.model.X)[0].min()) self.model.param_array[:] = ps self.model._trigger_params_changed() return fmins
[ "def", "get_fmin", "(", "self", ")", ":", "ps", "=", "self", ".", "model", ".", "param_array", ".", "copy", "(", ")", "fmins", "=", "[", "]", "for", "s", "in", "self", ".", "hmc_samples", ":", "if", "self", ".", "model", ".", "_fixes_", "is", "No...
Returns the location where the posterior mean is takes its minimal value.
[ "Returns", "the", "location", "where", "the", "posterior", "mean", "is", "takes", "its", "minimal", "value", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/gpmodel.py#L277-L293
242,060
SheffieldML/GPyOpt
GPyOpt/models/gpmodel.py
GPModel_MCMC.predict_withGradients
def predict_withGradients(self, X): """ Returns the mean, standard deviation, mean gradient and standard deviation gradient at X for all the MCMC samples. """ if X.ndim==1: X = X[None,:] ps = self.model.param_array.copy() means = [] stds = [] dmdxs = [] dsdxs = [] for s in self.hmc_samples: if self.model._fixes_ is None: self.model[:] = s else: self.model[self.model._fixes_] = s self.model._trigger_params_changed() m, v = self.model.predict(X) std = np.sqrt(np.clip(v, 1e-10, np.inf)) dmdx, dvdx = self.model.predictive_gradients(X) dmdx = dmdx[:,:,0] dsdx = dvdx / (2*std) means.append(m) stds.append(std) dmdxs.append(dmdx) dsdxs.append(dsdx) self.model.param_array[:] = ps self.model._trigger_params_changed() return means, stds, dmdxs, dsdxs
python
def predict_withGradients(self, X): if X.ndim==1: X = X[None,:] ps = self.model.param_array.copy() means = [] stds = [] dmdxs = [] dsdxs = [] for s in self.hmc_samples: if self.model._fixes_ is None: self.model[:] = s else: self.model[self.model._fixes_] = s self.model._trigger_params_changed() m, v = self.model.predict(X) std = np.sqrt(np.clip(v, 1e-10, np.inf)) dmdx, dvdx = self.model.predictive_gradients(X) dmdx = dmdx[:,:,0] dsdx = dvdx / (2*std) means.append(m) stds.append(std) dmdxs.append(dmdx) dsdxs.append(dsdx) self.model.param_array[:] = ps self.model._trigger_params_changed() return means, stds, dmdxs, dsdxs
[ "def", "predict_withGradients", "(", "self", ",", "X", ")", ":", "if", "X", ".", "ndim", "==", "1", ":", "X", "=", "X", "[", "None", ",", ":", "]", "ps", "=", "self", ".", "model", ".", "param_array", ".", "copy", "(", ")", "means", "=", "[", ...
Returns the mean, standard deviation, mean gradient and standard deviation gradient at X for all the MCMC samples.
[ "Returns", "the", "mean", "standard", "deviation", "mean", "gradient", "and", "standard", "deviation", "gradient", "at", "X", "for", "all", "the", "MCMC", "samples", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/models/gpmodel.py#L295-L322
242,061
SheffieldML/GPyOpt
GPyOpt/interface/func_loader.py
load_objective
def load_objective(config): """ Loads the objective function from a .json file. """ assert 'prjpath' in config assert 'main-file' in config, "The problem file ('main-file') is missing!" os.chdir(config['prjpath']) if config['language'].lower()=='python': assert config['main-file'].endswith('.py'), 'The python problem file has to end with .py!' import imp m = imp.load_source(config['main-file'][:-3], os.path.join(config['prjpath'],config['main-file'])) func = m.__dict__[config['main-file'][:-3]] return func
python
def load_objective(config): assert 'prjpath' in config assert 'main-file' in config, "The problem file ('main-file') is missing!" os.chdir(config['prjpath']) if config['language'].lower()=='python': assert config['main-file'].endswith('.py'), 'The python problem file has to end with .py!' import imp m = imp.load_source(config['main-file'][:-3], os.path.join(config['prjpath'],config['main-file'])) func = m.__dict__[config['main-file'][:-3]] return func
[ "def", "load_objective", "(", "config", ")", ":", "assert", "'prjpath'", "in", "config", "assert", "'main-file'", "in", "config", ",", "\"The problem file ('main-file') is missing!\"", "os", ".", "chdir", "(", "config", "[", "'prjpath'", "]", ")", "if", "config", ...
Loads the objective function from a .json file.
[ "Loads", "the", "objective", "function", "from", "a", ".", "json", "file", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/func_loader.py#L7-L21
242,062
SheffieldML/GPyOpt
GPyOpt/acquisitions/EI.py
AcquisitionEI._compute_acq
def _compute_acq(self, x): """ Computes the Expected Improvement per unit of cost """ m, s = self.model.predict(x) fmin = self.model.get_fmin() phi, Phi, u = get_quantiles(self.jitter, fmin, m, s) f_acqu = s * (u * Phi + phi) return f_acqu
python
def _compute_acq(self, x): m, s = self.model.predict(x) fmin = self.model.get_fmin() phi, Phi, u = get_quantiles(self.jitter, fmin, m, s) f_acqu = s * (u * Phi + phi) return f_acqu
[ "def", "_compute_acq", "(", "self", ",", "x", ")", ":", "m", ",", "s", "=", "self", ".", "model", ".", "predict", "(", "x", ")", "fmin", "=", "self", ".", "model", ".", "get_fmin", "(", ")", "phi", ",", "Phi", ",", "u", "=", "get_quantiles", "(...
Computes the Expected Improvement per unit of cost
[ "Computes", "the", "Expected", "Improvement", "per", "unit", "of", "cost" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/acquisitions/EI.py#L32-L40
242,063
SheffieldML/GPyOpt
GPyOpt/interface/config_parser.py
update_config
def update_config(config_new, config_default): ''' Updates the loaded method configuration with default values. ''' if any([isinstance(v, dict) for v in list(config_new.values())]): for k,v in list(config_new.items()): if isinstance(v,dict) and k in config_default: update_config(config_new[k],config_default[k]) else: config_default[k] = v else: config_default.update(config_new) return config_default
python
def update_config(config_new, config_default): ''' Updates the loaded method configuration with default values. ''' if any([isinstance(v, dict) for v in list(config_new.values())]): for k,v in list(config_new.items()): if isinstance(v,dict) and k in config_default: update_config(config_new[k],config_default[k]) else: config_default[k] = v else: config_default.update(config_new) return config_default
[ "def", "update_config", "(", "config_new", ",", "config_default", ")", ":", "if", "any", "(", "[", "isinstance", "(", "v", ",", "dict", ")", "for", "v", "in", "list", "(", "config_new", ".", "values", "(", ")", ")", "]", ")", ":", "for", "k", ",", ...
Updates the loaded method configuration with default values.
[ "Updates", "the", "loaded", "method", "configuration", "with", "default", "values", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/config_parser.py#L62-L75
242,064
SheffieldML/GPyOpt
GPyOpt/interface/config_parser.py
parser
def parser(input_file_path='config.json'): ''' Parser for the .json file containing the configuration of the method. ''' # --- Read .json file try: with open(input_file_path, 'r') as config_file: config_new = json.load(config_file) config_file.close() except: raise Exception('Config file "'+input_file_path+'" not loaded properly. Please check it an try again.') import copy options = update_config(config_new, copy.deepcopy(default_config)) return options
python
def parser(input_file_path='config.json'): ''' Parser for the .json file containing the configuration of the method. ''' # --- Read .json file try: with open(input_file_path, 'r') as config_file: config_new = json.load(config_file) config_file.close() except: raise Exception('Config file "'+input_file_path+'" not loaded properly. Please check it an try again.') import copy options = update_config(config_new, copy.deepcopy(default_config)) return options
[ "def", "parser", "(", "input_file_path", "=", "'config.json'", ")", ":", "# --- Read .json file", "try", ":", "with", "open", "(", "input_file_path", ",", "'r'", ")", "as", "config_file", ":", "config_new", "=", "json", ".", "load", "(", "config_file", ")", ...
Parser for the .json file containing the configuration of the method.
[ "Parser", "for", "the", ".", "json", "file", "containing", "the", "configuration", "of", "the", "method", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/interface/config_parser.py#L78-L94
242,065
SheffieldML/GPyOpt
GPyOpt/util/mcmc_sampler.py
AffineInvariantEnsembleSampler.get_samples
def get_samples(self, n_samples, log_p_function, burn_in_steps=50): """ Generates samples. Parameters: n_samples - number of samples to generate log_p_function - a function that returns log density for a specific sample burn_in_steps - number of burn-in steps for sampling Returns a tuple of two array: (samples, log_p_function values for samples) """ restarts = initial_design('random', self.space, n_samples) sampler = emcee.EnsembleSampler(n_samples, self.space.input_dim(), log_p_function) samples, samples_log, _ = sampler.run_mcmc(restarts, burn_in_steps) # make sure we have an array of shape (n samples, space input dim) if len(samples.shape) == 1: samples = samples.reshape(-1, 1) samples_log = samples_log.reshape(-1, 1) return samples, samples_log
python
def get_samples(self, n_samples, log_p_function, burn_in_steps=50): restarts = initial_design('random', self.space, n_samples) sampler = emcee.EnsembleSampler(n_samples, self.space.input_dim(), log_p_function) samples, samples_log, _ = sampler.run_mcmc(restarts, burn_in_steps) # make sure we have an array of shape (n samples, space input dim) if len(samples.shape) == 1: samples = samples.reshape(-1, 1) samples_log = samples_log.reshape(-1, 1) return samples, samples_log
[ "def", "get_samples", "(", "self", ",", "n_samples", ",", "log_p_function", ",", "burn_in_steps", "=", "50", ")", ":", "restarts", "=", "initial_design", "(", "'random'", ",", "self", ".", "space", ",", "n_samples", ")", "sampler", "=", "emcee", ".", "Ense...
Generates samples. Parameters: n_samples - number of samples to generate log_p_function - a function that returns log density for a specific sample burn_in_steps - number of burn-in steps for sampling Returns a tuple of two array: (samples, log_p_function values for samples)
[ "Generates", "samples", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/util/mcmc_sampler.py#L39-L59
242,066
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO.suggest_next_locations
def suggest_next_locations(self, context = None, pending_X = None, ignored_X = None): """ Run a single optimization step and return the next locations to evaluate the objective. Number of suggested locations equals to batch_size. :param context: fixes specified variables to a particular context (values) for the optimization run (default, None). :param pending_X: matrix of input configurations that are in a pending state (i.e., do not have an evaluation yet) (default, None). :param ignored_X: matrix of input configurations that the user black-lists, i.e., those configurations will not be suggested again (default, None). """ self.model_parameters_iterations = None self.num_acquisitions = 0 self.context = context self._update_model(self.normalization_type) suggested_locations = self._compute_next_evaluations(pending_zipped_X = pending_X, ignored_zipped_X = ignored_X) return suggested_locations
python
def suggest_next_locations(self, context = None, pending_X = None, ignored_X = None): self.model_parameters_iterations = None self.num_acquisitions = 0 self.context = context self._update_model(self.normalization_type) suggested_locations = self._compute_next_evaluations(pending_zipped_X = pending_X, ignored_zipped_X = ignored_X) return suggested_locations
[ "def", "suggest_next_locations", "(", "self", ",", "context", "=", "None", ",", "pending_X", "=", "None", ",", "ignored_X", "=", "None", ")", ":", "self", ".", "model_parameters_iterations", "=", "None", "self", ".", "num_acquisitions", "=", "0", "self", "."...
Run a single optimization step and return the next locations to evaluate the objective. Number of suggested locations equals to batch_size. :param context: fixes specified variables to a particular context (values) for the optimization run (default, None). :param pending_X: matrix of input configurations that are in a pending state (i.e., do not have an evaluation yet) (default, None). :param ignored_X: matrix of input configurations that the user black-lists, i.e., those configurations will not be suggested again (default, None).
[ "Run", "a", "single", "optimization", "step", "and", "return", "the", "next", "locations", "to", "evaluate", "the", "objective", ".", "Number", "of", "suggested", "locations", "equals", "to", "batch_size", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L55-L71
242,067
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO._print_convergence
def _print_convergence(self): """ Prints the reason why the optimization stopped. """ if self.verbosity: if (self.num_acquisitions == self.max_iter) and (not self.initial_iter): print(' ** Maximum number of iterations reached **') return 1 elif (self._distance_last_evaluations() < self.eps) and (not self.initial_iter): print(' ** Two equal location selected **') return 1 elif (self.max_time < self.cum_time) and not (self.initial_iter): print(' ** Evaluation time reached **') return 0 if self.initial_iter: print('** GPyOpt Bayesian Optimization class initialized successfully **') self.initial_iter = False
python
def _print_convergence(self): if self.verbosity: if (self.num_acquisitions == self.max_iter) and (not self.initial_iter): print(' ** Maximum number of iterations reached **') return 1 elif (self._distance_last_evaluations() < self.eps) and (not self.initial_iter): print(' ** Two equal location selected **') return 1 elif (self.max_time < self.cum_time) and not (self.initial_iter): print(' ** Evaluation time reached **') return 0 if self.initial_iter: print('** GPyOpt Bayesian Optimization class initialized successfully **') self.initial_iter = False
[ "def", "_print_convergence", "(", "self", ")", ":", "if", "self", ".", "verbosity", ":", "if", "(", "self", ".", "num_acquisitions", "==", "self", ".", "max_iter", ")", "and", "(", "not", "self", ".", "initial_iter", ")", ":", "print", "(", "' ** Maxim...
Prints the reason why the optimization stopped.
[ "Prints", "the", "reason", "why", "the", "optimization", "stopped", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L172-L190
242,068
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO.evaluate_objective
def evaluate_objective(self): """ Evaluates the objective """ self.Y_new, cost_new = self.objective.evaluate(self.suggested_sample) self.cost.update_cost_model(self.suggested_sample, cost_new) self.Y = np.vstack((self.Y,self.Y_new))
python
def evaluate_objective(self): self.Y_new, cost_new = self.objective.evaluate(self.suggested_sample) self.cost.update_cost_model(self.suggested_sample, cost_new) self.Y = np.vstack((self.Y,self.Y_new))
[ "def", "evaluate_objective", "(", "self", ")", ":", "self", ".", "Y_new", ",", "cost_new", "=", "self", ".", "objective", ".", "evaluate", "(", "self", ".", "suggested_sample", ")", "self", ".", "cost", ".", "update_cost_model", "(", "self", ".", "suggeste...
Evaluates the objective
[ "Evaluates", "the", "objective" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L193-L199
242,069
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO._compute_results
def _compute_results(self): """ Computes the optimum and its value. """ self.Y_best = best_value(self.Y) self.x_opt = self.X[np.argmin(self.Y),:] self.fx_opt = np.min(self.Y)
python
def _compute_results(self): self.Y_best = best_value(self.Y) self.x_opt = self.X[np.argmin(self.Y),:] self.fx_opt = np.min(self.Y)
[ "def", "_compute_results", "(", "self", ")", ":", "self", ".", "Y_best", "=", "best_value", "(", "self", ".", "Y", ")", "self", ".", "x_opt", "=", "self", ".", "X", "[", "np", ".", "argmin", "(", "self", ".", "Y", ")", ",", ":", "]", "self", "....
Computes the optimum and its value.
[ "Computes", "the", "optimum", "and", "its", "value", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L201-L207
242,070
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO._distance_last_evaluations
def _distance_last_evaluations(self): """ Computes the distance between the last two evaluations. """ if self.X.shape[0] < 2: # less than 2 evaluations return np.inf return np.sqrt(np.sum((self.X[-1, :] - self.X[-2, :]) ** 2))
python
def _distance_last_evaluations(self): if self.X.shape[0] < 2: # less than 2 evaluations return np.inf return np.sqrt(np.sum((self.X[-1, :] - self.X[-2, :]) ** 2))
[ "def", "_distance_last_evaluations", "(", "self", ")", ":", "if", "self", ".", "X", ".", "shape", "[", "0", "]", "<", "2", ":", "# less than 2 evaluations", "return", "np", ".", "inf", "return", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "se...
Computes the distance between the last two evaluations.
[ "Computes", "the", "distance", "between", "the", "last", "two", "evaluations", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L209-L216
242,071
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO.save_evaluations
def save_evaluations(self, evaluations_file = None): """ Saves evaluations at each iteration of the optimization :param evaluations_file: name of the file in which the results are saved. """ iterations = np.array(range(1, self.Y.shape[0] + 1))[:, None] results = np.hstack((iterations, self.Y, self.X)) header = ['Iteration', 'Y'] + ['var_' + str(k) for k in range(1, self.X.shape[1] + 1)] data = [header] + results.tolist() self._write_csv(evaluations_file, data)
python
def save_evaluations(self, evaluations_file = None): iterations = np.array(range(1, self.Y.shape[0] + 1))[:, None] results = np.hstack((iterations, self.Y, self.X)) header = ['Iteration', 'Y'] + ['var_' + str(k) for k in range(1, self.X.shape[1] + 1)] data = [header] + results.tolist() self._write_csv(evaluations_file, data)
[ "def", "save_evaluations", "(", "self", ",", "evaluations_file", "=", "None", ")", ":", "iterations", "=", "np", ".", "array", "(", "range", "(", "1", ",", "self", ".", "Y", ".", "shape", "[", "0", "]", "+", "1", ")", ")", "[", ":", ",", "None", ...
Saves evaluations at each iteration of the optimization :param evaluations_file: name of the file in which the results are saved.
[ "Saves", "evaluations", "at", "each", "iteration", "of", "the", "optimization" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L367-L378
242,072
SheffieldML/GPyOpt
GPyOpt/core/bo.py
BO.save_models
def save_models(self, models_file): """ Saves model parameters at each iteration of the optimization :param models_file: name of the file or a file buffer, in which the results are saved. """ if self.model_parameters_iterations is None: raise ValueError("No iterations have been carried out yet and hence no iterations of the BO can be saved") iterations = np.array(range(1,self.model_parameters_iterations.shape[0]+1))[:,None] results = np.hstack((iterations,self.model_parameters_iterations)) header = ['Iteration'] + self.model.get_model_parameters_names() data = [header] + results.tolist() self._write_csv(models_file, data)
python
def save_models(self, models_file): if self.model_parameters_iterations is None: raise ValueError("No iterations have been carried out yet and hence no iterations of the BO can be saved") iterations = np.array(range(1,self.model_parameters_iterations.shape[0]+1))[:,None] results = np.hstack((iterations,self.model_parameters_iterations)) header = ['Iteration'] + self.model.get_model_parameters_names() data = [header] + results.tolist() self._write_csv(models_file, data)
[ "def", "save_models", "(", "self", ",", "models_file", ")", ":", "if", "self", ".", "model_parameters_iterations", "is", "None", ":", "raise", "ValueError", "(", "\"No iterations have been carried out yet and hence no iterations of the BO can be saved\"", ")", "iterations", ...
Saves model parameters at each iteration of the optimization :param models_file: name of the file or a file buffer, in which the results are saved.
[ "Saves", "model", "parameters", "at", "each", "iteration", "of", "the", "optimization" ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/core/bo.py#L380-L394
242,073
SheffieldML/GPyOpt
GPyOpt/methods/bayesian_optimization.py
BayesianOptimization._init_design_chooser
def _init_design_chooser(self): """ Initializes the choice of X and Y based on the selected initial design and number of points selected. """ # If objective function was not provided, we require some initial sample data if self.f is None and (self.X is None or self.Y is None): raise InvalidConfigError("Initial data for both X and Y is required when objective function is not provided") # Case 1: if self.X is None: self.X = initial_design(self.initial_design_type, self.space, self.initial_design_numdata) self.Y, _ = self.objective.evaluate(self.X) # Case 2 elif self.X is not None and self.Y is None: self.Y, _ = self.objective.evaluate(self.X)
python
def _init_design_chooser(self): # If objective function was not provided, we require some initial sample data if self.f is None and (self.X is None or self.Y is None): raise InvalidConfigError("Initial data for both X and Y is required when objective function is not provided") # Case 1: if self.X is None: self.X = initial_design(self.initial_design_type, self.space, self.initial_design_numdata) self.Y, _ = self.objective.evaluate(self.X) # Case 2 elif self.X is not None and self.Y is None: self.Y, _ = self.objective.evaluate(self.X)
[ "def", "_init_design_chooser", "(", "self", ")", ":", "# If objective function was not provided, we require some initial sample data", "if", "self", ".", "f", "is", "None", "and", "(", "self", ".", "X", "is", "None", "or", "self", ".", "Y", "is", "None", ")", ":...
Initializes the choice of X and Y based on the selected initial design and number of points selected.
[ "Initializes", "the", "choice", "of", "X", "and", "Y", "based", "on", "the", "selected", "initial", "design", "and", "number", "of", "points", "selected", "." ]
255539dc5927819ca701e44fe3d76cd4864222fa
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/methods/bayesian_optimization.py#L183-L198
242,074
coleifer/huey
huey/api.py
crontab
def crontab(minute='*', hour='*', day='*', month='*', day_of_week='*'): """ Convert a "crontab"-style set of parameters into a test function that will return True when the given datetime matches the parameters set forth in the crontab. For day-of-week, 0=Sunday and 6=Saturday. Acceptable inputs: * = every distinct value */n = run every "n" times, i.e. hours='*/4' == 0, 4, 8, 12, 16, 20 m-n = run every time m..n m,n = run on m and n """ validation = ( ('m', month, range(1, 13)), ('d', day, range(1, 32)), ('w', day_of_week, range(8)), # 0-6, but also 7 for Sunday. ('H', hour, range(24)), ('M', minute, range(60)) ) cron_settings = [] for (date_str, value, acceptable) in validation: settings = set([]) if isinstance(value, int): value = str(value) for piece in value.split(','): if piece == '*': settings.update(acceptable) continue if piece.isdigit(): piece = int(piece) if piece not in acceptable: raise ValueError('%d is not a valid input' % piece) elif date_str == 'w': piece %= 7 settings.add(piece) else: dash_match = dash_re.match(piece) if dash_match: lhs, rhs = map(int, dash_match.groups()) if lhs not in acceptable or rhs not in acceptable: raise ValueError('%s is not a valid input' % piece) elif date_str == 'w': lhs %= 7 rhs %= 7 settings.update(range(lhs, rhs + 1)) continue # Handle stuff like */3, */6. every_match = every_re.match(piece) if every_match: if date_str == 'w': raise ValueError('Cannot perform this kind of matching' ' on day-of-week.') interval = int(every_match.groups()[0]) settings.update(acceptable[::interval]) cron_settings.append(sorted(list(settings))) def validate_date(timestamp): _, m, d, H, M, _, w, _, _ = timestamp.timetuple() # fix the weekday to be sunday=0 w = (w + 1) % 7 for (date_piece, selection) in zip((m, d, w, H, M), cron_settings): if date_piece not in selection: return False return True return validate_date
python
def crontab(minute='*', hour='*', day='*', month='*', day_of_week='*'): validation = ( ('m', month, range(1, 13)), ('d', day, range(1, 32)), ('w', day_of_week, range(8)), # 0-6, but also 7 for Sunday. ('H', hour, range(24)), ('M', minute, range(60)) ) cron_settings = [] for (date_str, value, acceptable) in validation: settings = set([]) if isinstance(value, int): value = str(value) for piece in value.split(','): if piece == '*': settings.update(acceptable) continue if piece.isdigit(): piece = int(piece) if piece not in acceptable: raise ValueError('%d is not a valid input' % piece) elif date_str == 'w': piece %= 7 settings.add(piece) else: dash_match = dash_re.match(piece) if dash_match: lhs, rhs = map(int, dash_match.groups()) if lhs not in acceptable or rhs not in acceptable: raise ValueError('%s is not a valid input' % piece) elif date_str == 'w': lhs %= 7 rhs %= 7 settings.update(range(lhs, rhs + 1)) continue # Handle stuff like */3, */6. every_match = every_re.match(piece) if every_match: if date_str == 'w': raise ValueError('Cannot perform this kind of matching' ' on day-of-week.') interval = int(every_match.groups()[0]) settings.update(acceptable[::interval]) cron_settings.append(sorted(list(settings))) def validate_date(timestamp): _, m, d, H, M, _, w, _, _ = timestamp.timetuple() # fix the weekday to be sunday=0 w = (w + 1) % 7 for (date_piece, selection) in zip((m, d, w, H, M), cron_settings): if date_piece not in selection: return False return True return validate_date
[ "def", "crontab", "(", "minute", "=", "'*'", ",", "hour", "=", "'*'", ",", "day", "=", "'*'", ",", "month", "=", "'*'", ",", "day_of_week", "=", "'*'", ")", ":", "validation", "=", "(", "(", "'m'", ",", "month", ",", "range", "(", "1", ",", "13...
Convert a "crontab"-style set of parameters into a test function that will return True when the given datetime matches the parameters set forth in the crontab. For day-of-week, 0=Sunday and 6=Saturday. Acceptable inputs: * = every distinct value */n = run every "n" times, i.e. hours='*/4' == 0, 4, 8, 12, 16, 20 m-n = run every time m..n m,n = run on m and n
[ "Convert", "a", "crontab", "-", "style", "set", "of", "parameters", "into", "a", "test", "function", "that", "will", "return", "True", "when", "the", "given", "datetime", "matches", "the", "parameters", "set", "forth", "in", "the", "crontab", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/api.py#L913-L990
242,075
coleifer/huey
huey/storage.py
BaseStorage.put_if_empty
def put_if_empty(self, key, value): """ Atomically write data only if the key is not already set. :param bytes key: Key to check/set. :param bytes value: Arbitrary data. :return: Boolean whether key/value was set. """ if self.has_data_for_key(key): return False self.put_data(key, value) return True
python
def put_if_empty(self, key, value): if self.has_data_for_key(key): return False self.put_data(key, value) return True
[ "def", "put_if_empty", "(", "self", ",", "key", ",", "value", ")", ":", "if", "self", ".", "has_data_for_key", "(", "key", ")", ":", "return", "False", "self", ".", "put_data", "(", "key", ",", "value", ")", "return", "True" ]
Atomically write data only if the key is not already set. :param bytes key: Key to check/set. :param bytes value: Arbitrary data. :return: Boolean whether key/value was set.
[ "Atomically", "write", "data", "only", "if", "the", "key", "is", "not", "already", "set", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/storage.py#L190-L201
242,076
coleifer/huey
huey/utils.py
make_naive
def make_naive(dt): """ Makes an aware datetime.datetime naive in local time zone. """ tt = dt.utctimetuple() ts = calendar.timegm(tt) local_tt = time.localtime(ts) return datetime.datetime(*local_tt[:6])
python
def make_naive(dt): tt = dt.utctimetuple() ts = calendar.timegm(tt) local_tt = time.localtime(ts) return datetime.datetime(*local_tt[:6])
[ "def", "make_naive", "(", "dt", ")", ":", "tt", "=", "dt", ".", "utctimetuple", "(", ")", "ts", "=", "calendar", ".", "timegm", "(", "tt", ")", "local_tt", "=", "time", ".", "localtime", "(", "ts", ")", "return", "datetime", ".", "datetime", "(", "...
Makes an aware datetime.datetime naive in local time zone.
[ "Makes", "an", "aware", "datetime", ".", "datetime", "naive", "in", "local", "time", "zone", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/utils.py#L48-L55
242,077
coleifer/huey
huey/consumer.py
BaseProcess.sleep_for_interval
def sleep_for_interval(self, start_ts, nseconds): """ Sleep for a given interval with respect to the start timestamp. So, if the start timestamp is 1337 and nseconds is 10, the method will actually sleep for nseconds - (current_timestamp - start_timestamp). So if the current timestamp is 1340, we'll only sleep for 7 seconds (the goal being to sleep until 1347, or 1337 + 10). """ sleep_time = nseconds - (time.time() - start_ts) if sleep_time <= 0: return self._logger.debug('Sleeping for %s', sleep_time) # Recompute time to sleep to improve accuracy in case the process was # pre-empted by the kernel while logging. sleep_time = nseconds - (time.time() - start_ts) if sleep_time > 0: time.sleep(sleep_time)
python
def sleep_for_interval(self, start_ts, nseconds): sleep_time = nseconds - (time.time() - start_ts) if sleep_time <= 0: return self._logger.debug('Sleeping for %s', sleep_time) # Recompute time to sleep to improve accuracy in case the process was # pre-empted by the kernel while logging. sleep_time = nseconds - (time.time() - start_ts) if sleep_time > 0: time.sleep(sleep_time)
[ "def", "sleep_for_interval", "(", "self", ",", "start_ts", ",", "nseconds", ")", ":", "sleep_time", "=", "nseconds", "-", "(", "time", ".", "time", "(", ")", "-", "start_ts", ")", "if", "sleep_time", "<=", "0", ":", "return", "self", ".", "_logger", "....
Sleep for a given interval with respect to the start timestamp. So, if the start timestamp is 1337 and nseconds is 10, the method will actually sleep for nseconds - (current_timestamp - start_timestamp). So if the current timestamp is 1340, we'll only sleep for 7 seconds (the goal being to sleep until 1347, or 1337 + 10).
[ "Sleep", "for", "a", "given", "interval", "with", "respect", "to", "the", "start", "timestamp", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/consumer.py#L39-L56
242,078
coleifer/huey
huey/consumer.py
Consumer.start
def start(self): """ Start all consumer processes and register signal handlers. """ if self.huey.immediate: raise ConfigurationError( 'Consumer cannot be run with Huey instances where immediate ' 'is enabled. Please check your configuration and ensure that ' '"huey.immediate = False".') # Log startup message. self._logger.info('Huey consumer started with %s %s, PID %s at %s', self.workers, self.worker_type, os.getpid(), self.huey._get_timestamp()) self._logger.info('Scheduler runs every %s second(s).', self.scheduler_interval) self._logger.info('Periodic tasks are %s.', 'enabled' if self.periodic else 'disabled') self._set_signal_handlers() msg = ['The following commands are available:'] for command in self.huey._registry._registry: msg.append('+ %s' % command) self._logger.info('\n'.join(msg)) # We'll temporarily ignore SIGINT and SIGHUP (so that it is inherited # by the child-processes). Once the child processes are created, we # restore the handler. original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN) if hasattr(signal, 'SIGHUP'): original_sighup_handler = signal.signal(signal.SIGHUP, signal.SIG_IGN) self.scheduler.start() for _, worker_process in self.worker_threads: worker_process.start() signal.signal(signal.SIGINT, original_sigint_handler) if hasattr(signal, 'SIGHUP'): signal.signal(signal.SIGHUP, original_sighup_handler)
python
def start(self): if self.huey.immediate: raise ConfigurationError( 'Consumer cannot be run with Huey instances where immediate ' 'is enabled. Please check your configuration and ensure that ' '"huey.immediate = False".') # Log startup message. self._logger.info('Huey consumer started with %s %s, PID %s at %s', self.workers, self.worker_type, os.getpid(), self.huey._get_timestamp()) self._logger.info('Scheduler runs every %s second(s).', self.scheduler_interval) self._logger.info('Periodic tasks are %s.', 'enabled' if self.periodic else 'disabled') self._set_signal_handlers() msg = ['The following commands are available:'] for command in self.huey._registry._registry: msg.append('+ %s' % command) self._logger.info('\n'.join(msg)) # We'll temporarily ignore SIGINT and SIGHUP (so that it is inherited # by the child-processes). Once the child processes are created, we # restore the handler. original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN) if hasattr(signal, 'SIGHUP'): original_sighup_handler = signal.signal(signal.SIGHUP, signal.SIG_IGN) self.scheduler.start() for _, worker_process in self.worker_threads: worker_process.start() signal.signal(signal.SIGINT, original_sigint_handler) if hasattr(signal, 'SIGHUP'): signal.signal(signal.SIGHUP, original_sighup_handler)
[ "def", "start", "(", "self", ")", ":", "if", "self", ".", "huey", ".", "immediate", ":", "raise", "ConfigurationError", "(", "'Consumer cannot be run with Huey instances where immediate '", "'is enabled. Please check your configuration and ensure that '", "'\"huey.immediate = Fal...
Start all consumer processes and register signal handlers.
[ "Start", "all", "consumer", "processes", "and", "register", "signal", "handlers", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/consumer.py#L344-L383
242,079
coleifer/huey
huey/consumer.py
Consumer.stop
def stop(self, graceful=False): """ Set the stop-flag. If `graceful=True`, this method blocks until the workers to finish executing any tasks they might be currently working on. """ self.stop_flag.set() if graceful: self._logger.info('Shutting down gracefully...') try: for _, worker_process in self.worker_threads: worker_process.join() except KeyboardInterrupt: self._logger.info('Received request to shut down now.') else: self._logger.info('All workers have stopped.') else: self._logger.info('Shutting down')
python
def stop(self, graceful=False): self.stop_flag.set() if graceful: self._logger.info('Shutting down gracefully...') try: for _, worker_process in self.worker_threads: worker_process.join() except KeyboardInterrupt: self._logger.info('Received request to shut down now.') else: self._logger.info('All workers have stopped.') else: self._logger.info('Shutting down')
[ "def", "stop", "(", "self", ",", "graceful", "=", "False", ")", ":", "self", ".", "stop_flag", ".", "set", "(", ")", "if", "graceful", ":", "self", ".", "_logger", ".", "info", "(", "'Shutting down gracefully...'", ")", "try", ":", "for", "_", ",", "...
Set the stop-flag. If `graceful=True`, this method blocks until the workers to finish executing any tasks they might be currently working on.
[ "Set", "the", "stop", "-", "flag", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/consumer.py#L385-L403
242,080
coleifer/huey
huey/consumer.py
Consumer.run
def run(self): """ Run the consumer. """ self.start() timeout = self._stop_flag_timeout health_check_ts = time.time() while True: try: self.stop_flag.wait(timeout=timeout) except KeyboardInterrupt: self._logger.info('Received SIGINT') self.stop(graceful=True) except: self._logger.exception('Error in consumer.') self.stop() else: if self._received_signal: self.stop(graceful=self._graceful) if self.stop_flag.is_set(): break if self._health_check: now = time.time() if now >= health_check_ts + self._health_check_interval: health_check_ts = now self.check_worker_health() if self._restart: self._logger.info('Consumer will restart.') python = sys.executable os.execl(python, python, *sys.argv) else: self._logger.info('Consumer exiting.')
python
def run(self): self.start() timeout = self._stop_flag_timeout health_check_ts = time.time() while True: try: self.stop_flag.wait(timeout=timeout) except KeyboardInterrupt: self._logger.info('Received SIGINT') self.stop(graceful=True) except: self._logger.exception('Error in consumer.') self.stop() else: if self._received_signal: self.stop(graceful=self._graceful) if self.stop_flag.is_set(): break if self._health_check: now = time.time() if now >= health_check_ts + self._health_check_interval: health_check_ts = now self.check_worker_health() if self._restart: self._logger.info('Consumer will restart.') python = sys.executable os.execl(python, python, *sys.argv) else: self._logger.info('Consumer exiting.')
[ "def", "run", "(", "self", ")", ":", "self", ".", "start", "(", ")", "timeout", "=", "self", ".", "_stop_flag_timeout", "health_check_ts", "=", "time", ".", "time", "(", ")", "while", "True", ":", "try", ":", "self", ".", "stop_flag", ".", "wait", "(...
Run the consumer.
[ "Run", "the", "consumer", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/consumer.py#L405-L440
242,081
coleifer/huey
huey/consumer.py
Consumer.check_worker_health
def check_worker_health(self): """ Check the health of the worker processes. Workers that have died will be replaced with new workers. """ self._logger.debug('Checking worker health.') workers = [] restart_occurred = False for i, (worker, worker_t) in enumerate(self.worker_threads): if not self.environment.is_alive(worker_t): self._logger.warning('Worker %d died, restarting.', i + 1) worker = self._create_worker() worker_t = self._create_process(worker, 'Worker-%d' % (i + 1)) worker_t.start() restart_occurred = True workers.append((worker, worker_t)) if restart_occurred: self.worker_threads = workers else: self._logger.debug('Workers are up and running.') if not self.environment.is_alive(self.scheduler): self._logger.warning('Scheduler died, restarting.') scheduler = self._create_scheduler() self.scheduler = self._create_process(scheduler, 'Scheduler') self.scheduler.start() else: self._logger.debug('Scheduler is up and running.') return not restart_occurred
python
def check_worker_health(self): self._logger.debug('Checking worker health.') workers = [] restart_occurred = False for i, (worker, worker_t) in enumerate(self.worker_threads): if not self.environment.is_alive(worker_t): self._logger.warning('Worker %d died, restarting.', i + 1) worker = self._create_worker() worker_t = self._create_process(worker, 'Worker-%d' % (i + 1)) worker_t.start() restart_occurred = True workers.append((worker, worker_t)) if restart_occurred: self.worker_threads = workers else: self._logger.debug('Workers are up and running.') if not self.environment.is_alive(self.scheduler): self._logger.warning('Scheduler died, restarting.') scheduler = self._create_scheduler() self.scheduler = self._create_process(scheduler, 'Scheduler') self.scheduler.start() else: self._logger.debug('Scheduler is up and running.') return not restart_occurred
[ "def", "check_worker_health", "(", "self", ")", ":", "self", ".", "_logger", ".", "debug", "(", "'Checking worker health.'", ")", "workers", "=", "[", "]", "restart_occurred", "=", "False", "for", "i", ",", "(", "worker", ",", "worker_t", ")", "in", "enume...
Check the health of the worker processes. Workers that have died will be replaced with new workers.
[ "Check", "the", "health", "of", "the", "worker", "processes", ".", "Workers", "that", "have", "died", "will", "be", "replaced", "with", "new", "workers", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/consumer.py#L442-L472
242,082
coleifer/huey
huey/contrib/djhuey/__init__.py
close_db
def close_db(fn): """Decorator to be used with tasks that may operate on the database.""" @wraps(fn) def inner(*args, **kwargs): try: return fn(*args, **kwargs) finally: if not HUEY.immediate: close_old_connections() return inner
python
def close_db(fn): @wraps(fn) def inner(*args, **kwargs): try: return fn(*args, **kwargs) finally: if not HUEY.immediate: close_old_connections() return inner
[ "def", "close_db", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "not"...
Decorator to be used with tasks that may operate on the database.
[ "Decorator", "to", "be", "used", "with", "tasks", "that", "may", "operate", "on", "the", "database", "." ]
416e8da1ca18442c08431a91bce373de7d2d200f
https://github.com/coleifer/huey/blob/416e8da1ca18442c08431a91bce373de7d2d200f/huey/contrib/djhuey/__init__.py#L123-L132
242,083
johnwheeler/flask-ask
samples/purchase/model.py
Product.list
def list(self): """ return list of purchasable and not entitled products""" mylist = [] for prod in self.product_list: if self.purchasable(prod) and not self.entitled(prod): mylist.append(prod) return mylist
python
def list(self): mylist = [] for prod in self.product_list: if self.purchasable(prod) and not self.entitled(prod): mylist.append(prod) return mylist
[ "def", "list", "(", "self", ")", ":", "mylist", "=", "[", "]", "for", "prod", "in", "self", ".", "product_list", ":", "if", "self", ".", "purchasable", "(", "prod", ")", "and", "not", "self", ".", "entitled", "(", "prod", ")", ":", "mylist", ".", ...
return list of purchasable and not entitled products
[ "return", "list", "of", "purchasable", "and", "not", "entitled", "products" ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/samples/purchase/model.py#L51-L57
242,084
johnwheeler/flask-ask
samples/tidepooler/tidepooler.py
_find_tide_info
def _find_tide_info(predictions): """ Algorithm to find the 2 high tides for the day, the first of which is smaller and occurs mid-day, the second of which is larger and typically in the evening. """ last_prediction = None first_high_tide = None second_high_tide = None low_tide = None first_tide_done = False for prediction in predictions: if last_prediction is None: last_prediction = prediction continue if last_prediction['v'] < prediction['v']: if not first_tide_done: first_high_tide = prediction else: second_high_tide = prediction else: # we're decreasing if not first_tide_done and first_high_tide is not None: first_tide_done = True elif second_high_tide is not None: break # we're decreasing after having found the 2nd tide. We're done. if first_tide_done: low_tide = prediction last_prediction = prediction fmt = '%Y-%m-%d %H:%M' parse = datetime.datetime.strptime tideinfo = TideInfo() tideinfo.first_high_tide_time = parse(first_high_tide['t'], fmt) tideinfo.first_high_tide_height = float(first_high_tide['v']) tideinfo.second_high_tide_time = parse(second_high_tide['t'], fmt) tideinfo.second_high_tide_height = float(second_high_tide['v']) tideinfo.low_tide_time = parse(low_tide['t'], fmt) tideinfo.low_tide_height = float(low_tide['v']) return tideinfo
python
def _find_tide_info(predictions): last_prediction = None first_high_tide = None second_high_tide = None low_tide = None first_tide_done = False for prediction in predictions: if last_prediction is None: last_prediction = prediction continue if last_prediction['v'] < prediction['v']: if not first_tide_done: first_high_tide = prediction else: second_high_tide = prediction else: # we're decreasing if not first_tide_done and first_high_tide is not None: first_tide_done = True elif second_high_tide is not None: break # we're decreasing after having found the 2nd tide. We're done. if first_tide_done: low_tide = prediction last_prediction = prediction fmt = '%Y-%m-%d %H:%M' parse = datetime.datetime.strptime tideinfo = TideInfo() tideinfo.first_high_tide_time = parse(first_high_tide['t'], fmt) tideinfo.first_high_tide_height = float(first_high_tide['v']) tideinfo.second_high_tide_time = parse(second_high_tide['t'], fmt) tideinfo.second_high_tide_height = float(second_high_tide['v']) tideinfo.low_tide_time = parse(low_tide['t'], fmt) tideinfo.low_tide_height = float(low_tide['v']) return tideinfo
[ "def", "_find_tide_info", "(", "predictions", ")", ":", "last_prediction", "=", "None", "first_high_tide", "=", "None", "second_high_tide", "=", "None", "low_tide", "=", "None", "first_tide_done", "=", "False", "for", "prediction", "in", "predictions", ":", "if", ...
Algorithm to find the 2 high tides for the day, the first of which is smaller and occurs mid-day, the second of which is larger and typically in the evening.
[ "Algorithm", "to", "find", "the", "2", "high", "tides", "for", "the", "day", "the", "first", "of", "which", "is", "smaller", "and", "occurs", "mid", "-", "day", "the", "second", "of", "which", "is", "larger", "and", "typically", "in", "the", "evening", ...
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/samples/tidepooler/tidepooler.py#L252-L290
242,085
johnwheeler/flask-ask
flask_ask/models.py
audio.enqueue
def enqueue(self, stream_url, offset=0, opaque_token=None): """Adds stream to the queue. Does not impact the currently playing stream.""" directive = self._play_directive('ENQUEUE') audio_item = self._audio_item(stream_url=stream_url, offset=offset, push_buffer=False, opaque_token=opaque_token) audio_item['stream']['expectedPreviousToken'] = current_stream.token directive['audioItem'] = audio_item self._response['directives'].append(directive) return self
python
def enqueue(self, stream_url, offset=0, opaque_token=None): directive = self._play_directive('ENQUEUE') audio_item = self._audio_item(stream_url=stream_url, offset=offset, push_buffer=False, opaque_token=opaque_token) audio_item['stream']['expectedPreviousToken'] = current_stream.token directive['audioItem'] = audio_item self._response['directives'].append(directive) return self
[ "def", "enqueue", "(", "self", ",", "stream_url", ",", "offset", "=", "0", ",", "opaque_token", "=", "None", ")", ":", "directive", "=", "self", ".", "_play_directive", "(", "'ENQUEUE'", ")", "audio_item", "=", "self", ".", "_audio_item", "(", "stream_url"...
Adds stream to the queue. Does not impact the currently playing stream.
[ "Adds", "stream", "to", "the", "queue", ".", "Does", "not", "impact", "the", "currently", "playing", "stream", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/models.py#L364-L375
242,086
johnwheeler/flask-ask
flask_ask/models.py
audio.play_next
def play_next(self, stream_url=None, offset=0, opaque_token=None): """Replace all streams in the queue but does not impact the currently playing stream.""" directive = self._play_directive('REPLACE_ENQUEUED') directive['audioItem'] = self._audio_item(stream_url=stream_url, offset=offset, opaque_token=opaque_token) self._response['directives'].append(directive) return self
python
def play_next(self, stream_url=None, offset=0, opaque_token=None): directive = self._play_directive('REPLACE_ENQUEUED') directive['audioItem'] = self._audio_item(stream_url=stream_url, offset=offset, opaque_token=opaque_token) self._response['directives'].append(directive) return self
[ "def", "play_next", "(", "self", ",", "stream_url", "=", "None", ",", "offset", "=", "0", ",", "opaque_token", "=", "None", ")", ":", "directive", "=", "self", ".", "_play_directive", "(", "'REPLACE_ENQUEUED'", ")", "directive", "[", "'audioItem'", "]", "=...
Replace all streams in the queue but does not impact the currently playing stream.
[ "Replace", "all", "streams", "in", "the", "queue", "but", "does", "not", "impact", "the", "currently", "playing", "stream", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/models.py#L377-L383
242,087
johnwheeler/flask-ask
flask_ask/models.py
audio.resume
def resume(self): """Sends Play Directive to resume playback at the paused offset""" directive = self._play_directive('REPLACE_ALL') directive['audioItem'] = self._audio_item() self._response['directives'].append(directive) return self
python
def resume(self): directive = self._play_directive('REPLACE_ALL') directive['audioItem'] = self._audio_item() self._response['directives'].append(directive) return self
[ "def", "resume", "(", "self", ")", ":", "directive", "=", "self", ".", "_play_directive", "(", "'REPLACE_ALL'", ")", "directive", "[", "'audioItem'", "]", "=", "self", ".", "_audio_item", "(", ")", "self", ".", "_response", "[", "'directives'", "]", ".", ...
Sends Play Directive to resume playback at the paused offset
[ "Sends", "Play", "Directive", "to", "resume", "playback", "at", "the", "paused", "offset" ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/models.py#L385-L390
242,088
johnwheeler/flask-ask
flask_ask/models.py
audio._audio_item
def _audio_item(self, stream_url=None, offset=0, push_buffer=True, opaque_token=None): """Builds an AudioPlayer Directive's audioItem and updates current_stream""" audio_item = {'stream': {}} stream = audio_item['stream'] # existing stream if not stream_url: # stream.update(current_stream.__dict__) stream['url'] = current_stream.url stream['token'] = current_stream.token stream['offsetInMilliseconds'] = current_stream.offsetInMilliseconds # new stream else: stream['url'] = stream_url stream['token'] = opaque_token or str(uuid.uuid4()) stream['offsetInMilliseconds'] = offset if push_buffer: # prevents enqueued streams from becoming current_stream push_stream(stream_cache, context['System']['user']['userId'], stream) return audio_item
python
def _audio_item(self, stream_url=None, offset=0, push_buffer=True, opaque_token=None): audio_item = {'stream': {}} stream = audio_item['stream'] # existing stream if not stream_url: # stream.update(current_stream.__dict__) stream['url'] = current_stream.url stream['token'] = current_stream.token stream['offsetInMilliseconds'] = current_stream.offsetInMilliseconds # new stream else: stream['url'] = stream_url stream['token'] = opaque_token or str(uuid.uuid4()) stream['offsetInMilliseconds'] = offset if push_buffer: # prevents enqueued streams from becoming current_stream push_stream(stream_cache, context['System']['user']['userId'], stream) return audio_item
[ "def", "_audio_item", "(", "self", ",", "stream_url", "=", "None", ",", "offset", "=", "0", ",", "push_buffer", "=", "True", ",", "opaque_token", "=", "None", ")", ":", "audio_item", "=", "{", "'stream'", ":", "{", "}", "}", "stream", "=", "audio_item"...
Builds an AudioPlayer Directive's audioItem and updates current_stream
[ "Builds", "an", "AudioPlayer", "Directive", "s", "audioItem", "and", "updates", "current_stream" ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/models.py#L398-L418
242,089
johnwheeler/flask-ask
flask_ask/models.py
audio.clear_queue
def clear_queue(self, stop=False): """Clears queued streams and optionally stops current stream. Keyword Arguments: stop {bool} set True to stop current current stream and clear queued streams. set False to clear queued streams and allow current stream to finish default: {False} """ directive = {} directive['type'] = 'AudioPlayer.ClearQueue' if stop: directive['clearBehavior'] = 'CLEAR_ALL' else: directive['clearBehavior'] = 'CLEAR_ENQUEUED' self._response['directives'].append(directive) return self
python
def clear_queue(self, stop=False): directive = {} directive['type'] = 'AudioPlayer.ClearQueue' if stop: directive['clearBehavior'] = 'CLEAR_ALL' else: directive['clearBehavior'] = 'CLEAR_ENQUEUED' self._response['directives'].append(directive) return self
[ "def", "clear_queue", "(", "self", ",", "stop", "=", "False", ")", ":", "directive", "=", "{", "}", "directive", "[", "'type'", "]", "=", "'AudioPlayer.ClearQueue'", "if", "stop", ":", "directive", "[", "'clearBehavior'", "]", "=", "'CLEAR_ALL'", "else", "...
Clears queued streams and optionally stops current stream. Keyword Arguments: stop {bool} set True to stop current current stream and clear queued streams. set False to clear queued streams and allow current stream to finish default: {False}
[ "Clears", "queued", "streams", "and", "optionally", "stops", "current", "stream", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/models.py#L425-L442
242,090
johnwheeler/flask-ask
flask_ask/core.py
find_ask
def find_ask(): """ Find our instance of Ask, navigating Local's and possible blueprints. Note: This only supports returning a reference to the first instance of Ask found. """ if hasattr(current_app, 'ask'): return getattr(current_app, 'ask') else: if hasattr(current_app, 'blueprints'): blueprints = getattr(current_app, 'blueprints') for blueprint_name in blueprints: if hasattr(blueprints[blueprint_name], 'ask'): return getattr(blueprints[blueprint_name], 'ask')
python
def find_ask(): if hasattr(current_app, 'ask'): return getattr(current_app, 'ask') else: if hasattr(current_app, 'blueprints'): blueprints = getattr(current_app, 'blueprints') for blueprint_name in blueprints: if hasattr(blueprints[blueprint_name], 'ask'): return getattr(blueprints[blueprint_name], 'ask')
[ "def", "find_ask", "(", ")", ":", "if", "hasattr", "(", "current_app", ",", "'ask'", ")", ":", "return", "getattr", "(", "current_app", ",", "'ask'", ")", "else", ":", "if", "hasattr", "(", "current_app", ",", "'blueprints'", ")", ":", "blueprints", "=",...
Find our instance of Ask, navigating Local's and possible blueprints. Note: This only supports returning a reference to the first instance of Ask found.
[ "Find", "our", "instance", "of", "Ask", "navigating", "Local", "s", "and", "possible", "blueprints", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L21-L35
242,091
johnwheeler/flask-ask
flask_ask/core.py
Ask.init_app
def init_app(self, app, path='templates.yaml'): """Initializes Ask app by setting configuration variables, loading templates, and maps Ask route to a flask view. The Ask instance is given the following configuration variables by calling on Flask's configuration: `ASK_APPLICATION_ID`: Turn on application ID verification by setting this variable to an application ID or a list of allowed application IDs. By default, application ID verification is disabled and a warning is logged. This variable should be set in production to ensure requests are being sent by the applications you specify. Default: None `ASK_VERIFY_REQUESTS`: Enables or disables Alexa request verification, which ensures requests sent to your skill are from Amazon's Alexa service. This setting should not be disabled in production. It is useful for mocking JSON requests in automated tests. Default: True `ASK_VERIFY_TIMESTAMP_DEBUG`: Turn on request timestamp verification while debugging by setting this to True. Timestamp verification helps mitigate against replay attacks. It relies on the system clock being synchronized with an NTP server. This setting should not be enabled in production. Default: False `ASK_PRETTY_DEBUG_LOGS`: Add tabs and linebreaks to the Alexa request and response printed to the debug log. This improves readability when printing to the console, but breaks formatting when logging to CloudWatch. Default: False """ if self._route is None: raise TypeError("route is a required argument when app is not None") self.app = app app.ask = self app.add_url_rule(self._route, view_func=self._flask_view_func, methods=['POST']) app.jinja_loader = ChoiceLoader([app.jinja_loader, YamlLoader(app, path)])
python
def init_app(self, app, path='templates.yaml'): if self._route is None: raise TypeError("route is a required argument when app is not None") self.app = app app.ask = self app.add_url_rule(self._route, view_func=self._flask_view_func, methods=['POST']) app.jinja_loader = ChoiceLoader([app.jinja_loader, YamlLoader(app, path)])
[ "def", "init_app", "(", "self", ",", "app", ",", "path", "=", "'templates.yaml'", ")", ":", "if", "self", ".", "_route", "is", "None", ":", "raise", "TypeError", "(", "\"route is a required argument when app is not None\"", ")", "self", ".", "app", "=", "app",...
Initializes Ask app by setting configuration variables, loading templates, and maps Ask route to a flask view. The Ask instance is given the following configuration variables by calling on Flask's configuration: `ASK_APPLICATION_ID`: Turn on application ID verification by setting this variable to an application ID or a list of allowed application IDs. By default, application ID verification is disabled and a warning is logged. This variable should be set in production to ensure requests are being sent by the applications you specify. Default: None `ASK_VERIFY_REQUESTS`: Enables or disables Alexa request verification, which ensures requests sent to your skill are from Amazon's Alexa service. This setting should not be disabled in production. It is useful for mocking JSON requests in automated tests. Default: True `ASK_VERIFY_TIMESTAMP_DEBUG`: Turn on request timestamp verification while debugging by setting this to True. Timestamp verification helps mitigate against replay attacks. It relies on the system clock being synchronized with an NTP server. This setting should not be enabled in production. Default: False `ASK_PRETTY_DEBUG_LOGS`: Add tabs and linebreaks to the Alexa request and response printed to the debug log. This improves readability when printing to the console, but breaks formatting when logging to CloudWatch. Default: False
[ "Initializes", "Ask", "app", "by", "setting", "configuration", "variables", "loading", "templates", "and", "maps", "Ask", "route", "to", "a", "flask", "view", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L101-L142
242,092
johnwheeler/flask-ask
flask_ask/core.py
Ask.launch
def launch(self, f): """Decorator maps a view function as the endpoint for an Alexa LaunchRequest and starts the skill. @ask.launch def launched(): return question('Welcome to Foo') The wrapped function is registered as the launch view function and renders the response for requests to the Launch URL. A request to the launch URL is verified with the Alexa server before the payload is passed to the view function. Arguments: f {function} -- Launch view function """ self._launch_view_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
python
def launch(self, f): self._launch_view_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
[ "def", "launch", "(", "self", ",", "f", ")", ":", "self", ".", "_launch_view_func", "=", "f", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "self", ".", "_flask_view_func", "(", "*", "args", ",", ...
Decorator maps a view function as the endpoint for an Alexa LaunchRequest and starts the skill. @ask.launch def launched(): return question('Welcome to Foo') The wrapped function is registered as the launch view function and renders the response for requests to the Launch URL. A request to the launch URL is verified with the Alexa server before the payload is passed to the view function. Arguments: f {function} -- Launch view function
[ "Decorator", "maps", "a", "view", "function", "as", "the", "endpoint", "for", "an", "Alexa", "LaunchRequest", "and", "starts", "the", "skill", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L192-L212
242,093
johnwheeler/flask-ask
flask_ask/core.py
Ask.session_ended
def session_ended(self, f): """Decorator routes Alexa SessionEndedRequest to the wrapped view function to end the skill. @ask.session_ended def session_ended(): return "{}", 200 The wrapped function is registered as the session_ended view function and renders the response for requests to the end of the session. Arguments: f {function} -- session_ended view function """ self._session_ended_view_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
python
def session_ended(self, f): self._session_ended_view_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
[ "def", "session_ended", "(", "self", ",", "f", ")", ":", "self", ".", "_session_ended_view_func", "=", "f", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "self", ".", "_flask_view_func", "(", "*", "...
Decorator routes Alexa SessionEndedRequest to the wrapped view function to end the skill. @ask.session_ended def session_ended(): return "{}", 200 The wrapped function is registered as the session_ended view function and renders the response for requests to the end of the session. Arguments: f {function} -- session_ended view function
[ "Decorator", "routes", "Alexa", "SessionEndedRequest", "to", "the", "wrapped", "view", "function", "to", "end", "the", "skill", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L214-L232
242,094
johnwheeler/flask-ask
flask_ask/core.py
Ask.intent
def intent(self, intent_name, mapping={}, convert={}, default={}): """Decorator routes an Alexa IntentRequest and provides the slot parameters to the wrapped function. Functions decorated as an intent are registered as the view function for the Intent's URL, and provide the backend responses to give your Skill its functionality. @ask.intent('WeatherIntent', mapping={'city': 'City'}) def weather(city): return statement('I predict great weather for {}'.format(city)) Arguments: intent_name {str} -- Name of the intent request to be mapped to the decorated function Keyword Arguments: mapping {dict} -- Maps parameters to intent slots of a different name default: {} convert {dict} -- Converts slot values to data types before assignment to parameters default: {} default {dict} -- Provides default values for Intent slots if Alexa reuqest returns no corresponding slot, or a slot with an empty value default: {} """ def decorator(f): self._intent_view_funcs[intent_name] = f self._intent_mappings[intent_name] = mapping self._intent_converts[intent_name] = convert self._intent_defaults[intent_name] = default @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f return decorator
python
def intent(self, intent_name, mapping={}, convert={}, default={}): def decorator(f): self._intent_view_funcs[intent_name] = f self._intent_mappings[intent_name] = mapping self._intent_converts[intent_name] = convert self._intent_defaults[intent_name] = default @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f return decorator
[ "def", "intent", "(", "self", ",", "intent_name", ",", "mapping", "=", "{", "}", ",", "convert", "=", "{", "}", ",", "default", "=", "{", "}", ")", ":", "def", "decorator", "(", "f", ")", ":", "self", ".", "_intent_view_funcs", "[", "intent_name", ...
Decorator routes an Alexa IntentRequest and provides the slot parameters to the wrapped function. Functions decorated as an intent are registered as the view function for the Intent's URL, and provide the backend responses to give your Skill its functionality. @ask.intent('WeatherIntent', mapping={'city': 'City'}) def weather(city): return statement('I predict great weather for {}'.format(city)) Arguments: intent_name {str} -- Name of the intent request to be mapped to the decorated function Keyword Arguments: mapping {dict} -- Maps parameters to intent slots of a different name default: {} convert {dict} -- Converts slot values to data types before assignment to parameters default: {} default {dict} -- Provides default values for Intent slots if Alexa reuqest returns no corresponding slot, or a slot with an empty value default: {}
[ "Decorator", "routes", "an", "Alexa", "IntentRequest", "and", "provides", "the", "slot", "parameters", "to", "the", "wrapped", "function", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L234-L268
242,095
johnwheeler/flask-ask
flask_ask/core.py
Ask.default_intent
def default_intent(self, f): """Decorator routes any Alexa IntentRequest that is not matched by any existing @ask.intent routing.""" self._default_intent_view_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
python
def default_intent(self, f): self._default_intent_view_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
[ "def", "default_intent", "(", "self", ",", "f", ")", ":", "self", ".", "_default_intent_view_func", "=", "f", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "self", ".", "_flask_view_func", "(", "*", ...
Decorator routes any Alexa IntentRequest that is not matched by any existing @ask.intent routing.
[ "Decorator", "routes", "any", "Alexa", "IntentRequest", "that", "is", "not", "matched", "by", "any", "existing" ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L270-L277
242,096
johnwheeler/flask-ask
flask_ask/core.py
Ask.display_element_selected
def display_element_selected(self, f): """Decorator routes Alexa Display.ElementSelected request to the wrapped view function. @ask.display_element_selected def eval_element(): return "", 200 The wrapped function is registered as the display_element_selected view function and renders the response for requests. Arguments: f {function} -- display_element_selected view function """ self._display_element_selected_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
python
def display_element_selected(self, f): self._display_element_selected_func = f @wraps(f) def wrapper(*args, **kw): self._flask_view_func(*args, **kw) return f
[ "def", "display_element_selected", "(", "self", ",", "f", ")", ":", "self", ".", "_display_element_selected_func", "=", "f", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "self", ".", "_flask_view_func", ...
Decorator routes Alexa Display.ElementSelected request to the wrapped view function. @ask.display_element_selected def eval_element(): return "", 200 The wrapped function is registered as the display_element_selected view function and renders the response for requests. Arguments: f {function} -- display_element_selected view function
[ "Decorator", "routes", "Alexa", "Display", ".", "ElementSelected", "request", "to", "the", "wrapped", "view", "function", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L279-L297
242,097
johnwheeler/flask-ask
flask_ask/core.py
Ask.on_purchase_completed
def on_purchase_completed(self, mapping={'payload': 'payload','name':'name','status':'status','token':'token'}, convert={}, default={}): """Decorator routes an Connections.Response to the wrapped function. Request is sent when Alexa completes the purchase flow. See https://developer.amazon.com/docs/in-skill-purchase/add-isps-to-a-skill.html#handle-results The wrapped view function may accept parameters from the Request. In addition to locale, requestId, timestamp, and type @ask.on_purchase_completed( mapping={'payload': 'payload','name':'name','status':'status','token':'token'}) def completed(payload, name, status, token): logger.info(payload) logger.info(name) logger.info(status) logger.info(token) """ def decorator(f): self._intent_view_funcs['Connections.Response'] = f self._intent_mappings['Connections.Response'] = mapping self._intent_converts['Connections.Response'] = convert self._intent_defaults['Connections.Response'] = default @wraps(f) def wrapper(*args, **kwargs): self._flask_view_func(*args, **kwargs) return f return decorator
python
def on_purchase_completed(self, mapping={'payload': 'payload','name':'name','status':'status','token':'token'}, convert={}, default={}): def decorator(f): self._intent_view_funcs['Connections.Response'] = f self._intent_mappings['Connections.Response'] = mapping self._intent_converts['Connections.Response'] = convert self._intent_defaults['Connections.Response'] = default @wraps(f) def wrapper(*args, **kwargs): self._flask_view_func(*args, **kwargs) return f return decorator
[ "def", "on_purchase_completed", "(", "self", ",", "mapping", "=", "{", "'payload'", ":", "'payload'", ",", "'name'", ":", "'name'", ",", "'status'", ":", "'status'", ",", "'token'", ":", "'token'", "}", ",", "convert", "=", "{", "}", ",", "default", "=",...
Decorator routes an Connections.Response to the wrapped function. Request is sent when Alexa completes the purchase flow. See https://developer.amazon.com/docs/in-skill-purchase/add-isps-to-a-skill.html#handle-results The wrapped view function may accept parameters from the Request. In addition to locale, requestId, timestamp, and type @ask.on_purchase_completed( mapping={'payload': 'payload','name':'name','status':'status','token':'token'}) def completed(payload, name, status, token): logger.info(payload) logger.info(name) logger.info(status) logger.info(token)
[ "Decorator", "routes", "an", "Connections", ".", "Response", "to", "the", "wrapped", "function", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L300-L328
242,098
johnwheeler/flask-ask
flask_ask/core.py
Ask.run_aws_lambda
def run_aws_lambda(self, event): """Invoke the Flask Ask application from an AWS Lambda function handler. Use this method to service AWS Lambda requests from a custom Alexa skill. This method will invoke your Flask application providing a WSGI-compatible environment that wraps the original Alexa event provided to the AWS Lambda handler. Returns the output generated by a Flask Ask application, which should be used as the return value to the AWS Lambda handler function. Example usage: from flask import Flask from flask_ask import Ask, statement app = Flask(__name__) ask = Ask(app, '/') # This function name is what you defined when you create an # AWS Lambda function. By default, AWS calls this function # lambda_handler. def lambda_handler(event, _context): return ask.run_aws_lambda(event) @ask.intent('HelloIntent') def hello(firstname): speech_text = "Hello %s" % firstname return statement(speech_text).simple_card('Hello', speech_text) """ # We are guaranteed to be called by AWS as a Lambda function does not # expose a public facing interface. self.app.config['ASK_VERIFY_REQUESTS'] = False # Convert an environment variable to a WSGI "bytes-as-unicode" string enc, esc = sys.getfilesystemencoding(), 'surrogateescape' def unicode_to_wsgi(u): return u.encode(enc, esc).decode('iso-8859-1') # Create a WSGI-compatible environ that can be passed to the # application. It is loaded with the OS environment variables, # mandatory CGI-like variables, as well as the mandatory WSGI # variables. 
environ = {k: unicode_to_wsgi(v) for k, v in os.environ.items()} environ['REQUEST_METHOD'] = 'POST' environ['PATH_INFO'] = '/' environ['SERVER_NAME'] = 'AWS-Lambda' environ['SERVER_PORT'] = '80' environ['SERVER_PROTOCOL'] = 'HTTP/1.0' environ['wsgi.version'] = (1, 0) environ['wsgi.url_scheme'] = 'http' environ['wsgi.errors'] = sys.stderr environ['wsgi.multithread'] = False environ['wsgi.multiprocess'] = False environ['wsgi.run_once'] = True # Convert the event provided by the AWS Lambda handler to a JSON # string that can be read as the body of a HTTP POST request. body = json.dumps(event) environ['CONTENT_TYPE'] = 'application/json' environ['CONTENT_LENGTH'] = len(body) PY3 = sys.version_info[0] == 3 if PY3: environ['wsgi.input'] = io.StringIO(body) else: environ['wsgi.input'] = io.BytesIO(body) # Start response is a required callback that must be passed when # the application is invoked. It is used to set HTTP status and # headers. Read the WSGI spec for details (PEP3333). headers = [] def start_response(status, response_headers, _exc_info=None): headers[:] = [status, response_headers] # Invoke the actual Flask application providing our environment, # with our Alexa event as the body of the HTTP request, as well # as the callback function above. The result will be an iterator # that provides a serialized JSON string for our Alexa response. result = self.app(environ, start_response) try: if not headers: raise AssertionError("start_response() not called by WSGI app") output = b"".join(result) if not headers[0].startswith("2"): raise AssertionError("Non-2xx from app: hdrs={}, body={}".format(headers, output)) # The Lambda handler expects a Python object that can be # serialized as JSON, so we need to take the already serialized # JSON and deserialize it. return json.loads(output) finally: # Per the WSGI spec, we need to invoke the close method if it # is implemented on the result object. if hasattr(result, 'close'): result.close()
python
def run_aws_lambda(self, event): # We are guaranteed to be called by AWS as a Lambda function does not # expose a public facing interface. self.app.config['ASK_VERIFY_REQUESTS'] = False # Convert an environment variable to a WSGI "bytes-as-unicode" string enc, esc = sys.getfilesystemencoding(), 'surrogateescape' def unicode_to_wsgi(u): return u.encode(enc, esc).decode('iso-8859-1') # Create a WSGI-compatible environ that can be passed to the # application. It is loaded with the OS environment variables, # mandatory CGI-like variables, as well as the mandatory WSGI # variables. environ = {k: unicode_to_wsgi(v) for k, v in os.environ.items()} environ['REQUEST_METHOD'] = 'POST' environ['PATH_INFO'] = '/' environ['SERVER_NAME'] = 'AWS-Lambda' environ['SERVER_PORT'] = '80' environ['SERVER_PROTOCOL'] = 'HTTP/1.0' environ['wsgi.version'] = (1, 0) environ['wsgi.url_scheme'] = 'http' environ['wsgi.errors'] = sys.stderr environ['wsgi.multithread'] = False environ['wsgi.multiprocess'] = False environ['wsgi.run_once'] = True # Convert the event provided by the AWS Lambda handler to a JSON # string that can be read as the body of a HTTP POST request. body = json.dumps(event) environ['CONTENT_TYPE'] = 'application/json' environ['CONTENT_LENGTH'] = len(body) PY3 = sys.version_info[0] == 3 if PY3: environ['wsgi.input'] = io.StringIO(body) else: environ['wsgi.input'] = io.BytesIO(body) # Start response is a required callback that must be passed when # the application is invoked. It is used to set HTTP status and # headers. Read the WSGI spec for details (PEP3333). headers = [] def start_response(status, response_headers, _exc_info=None): headers[:] = [status, response_headers] # Invoke the actual Flask application providing our environment, # with our Alexa event as the body of the HTTP request, as well # as the callback function above. The result will be an iterator # that provides a serialized JSON string for our Alexa response. 
result = self.app(environ, start_response) try: if not headers: raise AssertionError("start_response() not called by WSGI app") output = b"".join(result) if not headers[0].startswith("2"): raise AssertionError("Non-2xx from app: hdrs={}, body={}".format(headers, output)) # The Lambda handler expects a Python object that can be # serialized as JSON, so we need to take the already serialized # JSON and deserialize it. return json.loads(output) finally: # Per the WSGI spec, we need to invoke the close method if it # is implemented on the result object. if hasattr(result, 'close'): result.close()
[ "def", "run_aws_lambda", "(", "self", ",", "event", ")", ":", "# We are guaranteed to be called by AWS as a Lambda function does not", "# expose a public facing interface.", "self", ".", "app", ".", "config", "[", "'ASK_VERIFY_REQUESTS'", "]", "=", "False", "# Convert an envi...
Invoke the Flask Ask application from an AWS Lambda function handler. Use this method to service AWS Lambda requests from a custom Alexa skill. This method will invoke your Flask application providing a WSGI-compatible environment that wraps the original Alexa event provided to the AWS Lambda handler. Returns the output generated by a Flask Ask application, which should be used as the return value to the AWS Lambda handler function. Example usage: from flask import Flask from flask_ask import Ask, statement app = Flask(__name__) ask = Ask(app, '/') # This function name is what you defined when you create an # AWS Lambda function. By default, AWS calls this function # lambda_handler. def lambda_handler(event, _context): return ask.run_aws_lambda(event) @ask.intent('HelloIntent') def hello(firstname): speech_text = "Hello %s" % firstname return statement(speech_text).simple_card('Hello', speech_text)
[ "Invoke", "the", "Flask", "Ask", "application", "from", "an", "AWS", "Lambda", "function", "handler", "." ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L585-L683
242,099
johnwheeler/flask-ask
flask_ask/core.py
Ask._parse_timestamp
def _parse_timestamp(timestamp): """ Parse a given timestamp value, raising ValueError if None or Flasey """ if timestamp: try: return aniso8601.parse_datetime(timestamp) except AttributeError: # raised by aniso8601 if raw_timestamp is not valid string # in ISO8601 format try: return datetime.utcfromtimestamp(timestamp) except: # relax the timestamp a bit in case it was sent in millis return datetime.utcfromtimestamp(timestamp/1000) raise ValueError('Invalid timestamp value! Cannot parse from either ISO8601 string or UTC timestamp.')
python
def _parse_timestamp(timestamp): if timestamp: try: return aniso8601.parse_datetime(timestamp) except AttributeError: # raised by aniso8601 if raw_timestamp is not valid string # in ISO8601 format try: return datetime.utcfromtimestamp(timestamp) except: # relax the timestamp a bit in case it was sent in millis return datetime.utcfromtimestamp(timestamp/1000) raise ValueError('Invalid timestamp value! Cannot parse from either ISO8601 string or UTC timestamp.')
[ "def", "_parse_timestamp", "(", "timestamp", ")", ":", "if", "timestamp", ":", "try", ":", "return", "aniso8601", ".", "parse_datetime", "(", "timestamp", ")", "except", "AttributeError", ":", "# raised by aniso8601 if raw_timestamp is not valid string", "# in ISO8601 for...
Parse a given timestamp value, raising ValueError if None or Flasey
[ "Parse", "a", "given", "timestamp", "value", "raising", "ValueError", "if", "None", "or", "Flasey" ]
fe407646ae404a8c90b363c86d5c4c201b6a5580
https://github.com/johnwheeler/flask-ask/blob/fe407646ae404a8c90b363c86d5c4c201b6a5580/flask_ask/core.py#L724-L740