idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
31,300
def get_state(self):
    """Return the sampler's current state so sampling can be resumed later.

    The result maps 'sampler' to the tracked sampler attributes and
    'stochastics' to each stochastic's current value, keyed by name.
    """
    return {
        'sampler': {attr: getattr(self, attr) for attr in self._state},
        'stochastics': {stoch.__name__: stoch.value
                        for stoch in self.stochastics},
    }
Return the sampler's current state in order to restart sampling at a later time.
31,301
def save_state(self):
    """Tell the database to save the current state of the sampler.

    Failures are reported but deliberately non-fatal: sampling should
    not abort just because the state could not be persisted.
    """
    try:
        self.db.savestate(self.get_state())
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit.
        print_('Warning, unable to save state.')
        print_('Error message:')
        traceback.print_exc()
Tell the database to save the current state of the sampler .
31,302
def restore_sampler_state ( self ) : state = self . db . getstate ( ) or { } sampler_state = state . get ( 'sampler' , { } ) self . __dict__ . update ( sampler_state ) stoch_state = state . get ( 'stochastics' , { } ) for sm in self . stochastics : try : sm . value = stoch_state [ sm . __name__ ] except : warnings . wa...
Restore the state of the sampler to the state stored in the database.
31,303
def normcdf(x, log=False):
    """Normal cumulative density function."""
    probs = np.atleast_1d(x).copy()
    flib.normcdf(probs)  # Fortran helper transforms the array in place
    if not log:
        return probs
    # Log-CDF is only representable when every probability is positive.
    return np.log(probs) if (probs > 0).all() else -np.inf
Normal cumulative density function .
31,304
def lognormcdf(x, mu, tau):
    """Log-normal cumulative density function."""
    scale = np.sqrt(tau / 2)
    return np.array([0.5 * (1 - flib.derf(-scale * (np.log(v) - mu)))
                     for v in np.atleast_1d(x)])
Log - normal cumulative density function
31,305
def invcdf(x):
    """Inverse of the normal cumulative density function."""
    flat = np.ravel(x)
    transformed = np.array([flib.ppnd16(v, 1) for v in flat])
    # Restore the caller's original array shape.
    return np.reshape(transformed, np.shape(x))
Inverse of normal cumulative density function .
31,306
def trace_generator(trace, start=0, stop=None, step=1):
    """Yield successive values from the object's trace.

    :Parameters:
      trace : object with length() and gettrace(slicing=...) methods
      start, stop, step : iteration bounds; stop=None means the full trace.
    """
    # `stop or np.inf` would wrongly turn an explicit stop of 0 into inf.
    if stop is None:
        stop = np.inf
    size = min(trace.length(), stop)
    i = start
    while i < size:
        yield trace.gettrace(slicing=slice(i, i + 1))[0]
        i += step
Return a generator returning values from the object s trace .
31,307
def draw_random(obj, **kwds):
    """Yield random variates from obj's random method indefinitely.

    Each keyword value is an iterator; every draw advances each iterator
    and installs the value as the corresponding parent of obj before
    calling obj.random().
    """
    while True:
        for key, gen in kwds.items():
            # next(gen) works on both Python 2 and 3; the removed
            # .next() method (and six.iteritems) were Py2-only idioms.
            obj.parents[key] = next(gen)
        yield obj.random()
Draw random variates from obj . random method .
31,308
def rec_setattr(obj, attr, value):
    """Set an object's attribute; dotted paths descend into sub-attributes.

    Example: rec_setattr(a, 'b.c', 1) is equivalent to a.b.c = 1.
    """
    parts = attr.split('.')
    target = obj
    for name in parts[:-1]:
        target = getattr(target, name)
    setattr(target, parts[-1], value)
Set object s attribute . May use dot notation .
31,309
def calc_min_interval ( x , alpha ) : n = len ( x ) cred_mass = 1.0 - alpha interval_idx_inc = int ( np . floor ( cred_mass * n ) ) n_intervals = n - interval_idx_inc interval_width = x [ interval_idx_inc : ] - x [ : n_intervals ] if len ( interval_width ) == 0 : print_ ( 'Too few elements for interval calculation' ) r...
Internal method to determine the minimum interval of a given width
31,310
def quantiles ( x , qlist = ( 2.5 , 25 , 50 , 75 , 97.5 ) ) : x = x . copy ( ) if x . ndim > 1 : sx = sort ( x . T ) . T else : sx = sort ( x ) try : quants = [ sx [ int ( len ( sx ) * q / 100.0 ) ] for q in qlist ] return dict ( zip ( qlist , quants ) ) except IndexError : print_ ( "Too few elements for quantile calcu...
Returns a dictionary of requested quantiles from array
31,311
def coda_output ( pymc_object , name = None , chain = - 1 ) : print_ ( ) print_ ( "Generating CODA output" ) print_ ( '=' * 50 ) if name is None : name = pymc_object . __name__ trace_file = open ( name + '_coda.out' , 'w' ) index_file = open ( name + '_coda.ind' , 'w' ) variables = [ pymc_object ] if hasattr ( pymc_obj...
Generate output files that are compatible with CODA
31,312
def getInput ( ) : input = '' if sys . platform == 'win32' : import msvcrt if msvcrt . kbhit ( ) : input += msvcrt . getch ( ) print_ ( input ) else : time . sleep ( .1 ) else : sock = sys . stdin . fileno ( ) while len ( select . select ( [ sock ] , [ ] , [ ] , 0.1 ) [ 0 ] ) > 0 : input += decode ( os . read ( sock , ...
Read the input buffer without blocking the system .
31,313
def find_generations ( container , with_data = False ) : generations = [ ] generations . append ( set ( ) ) all_children = set ( ) if with_data : stochastics_to_iterate = container . stochastics | container . observed_stochastics else : stochastics_to_iterate = container . stochastics for s in stochastics_to_iterate : ...
A generation is the set of stochastic variables that only has parents in previous generations .
31,314
def append(nodelist, node, label=None, sep='_'):
    """Append a node to nodelist, renaming it with a positional label.

    The trailing `sep`-suffix of node.__name__ is replaced by str(label);
    label defaults to the current list length. Returns nodelist.
    """
    nname = node.__name__
    if label is None:
        # `label or len(nodelist)` would wrongly discard an explicit label of 0.
        label = len(nodelist)
    ind = nname.rfind(sep)
    if ind == -1:
        # No separator present: append one rather than clipping the last
        # character (rfind returns -1, and nname[:-1] drops a character).
        node.__name__ = nname + sep + str(label)
    else:
        node.__name__ = nname[:ind] + sep + str(label)
    nodelist.append(node)
    return nodelist
Append function to automate the naming of list elements in Containers .
31,315
def logp_partial_gradient ( self , variable , calculation_set = None ) : if self . verbose > 0 : print_ ( '\t' + self . __name__ + ': logp_partial_gradient accessed.' ) if not ( datatypes . is_continuous ( variable ) and datatypes . is_continuous ( self ) ) : return zeros ( shape ( variable . value ) ) gradient = built...
gets the logp gradient of this deterministic with respect to variable
31,316
def gen_lazy_function ( self ) : if self . _value is None : if self . _random is not None : self . value = self . _random ( ** self . _parents . value ) else : raise ValueError ( 'Stochastic ' + self . __name__ + "'s value initialized to None; no initial value or random method provided." ) arguments = { } arguments . u...
Will be called by Node at instantiation .
31,317
def logp_gradient_contribution(self, calculation_set=None):
    """Gradient of the joint log posterior with respect to self.

    The calculation is restricted to the variables in calculation_set;
    own contribution plus the contribution of every child is summed.
    """
    own = self.logp_partial_gradient(self, calculation_set)
    child_terms = [child.logp_partial_gradient(self, calculation_set)
                   for child in self.children]
    return own + builtins.sum(child_terms)
Calculates the gradient of the joint log posterior with respect to self . Calculation of the log posterior is restricted to the variables in calculation_set .
31,318
def logp_partial_gradient ( self , variable , calculation_set = None ) : if ( calculation_set is None ) or ( self in calculation_set ) : if not datatypes . is_continuous ( variable ) : return zeros ( shape ( variable . value ) ) if variable is self : try : gradient_func = self . _logp_partial_gradients [ 'value' ] exce...
Calculates the partial gradient of the posterior of self with respect to variable . Returns zero if self is not in calculation_set .
31,319
def random(self):
    """Draw a new value for this stochastic conditional on its parents.

    The draw is reshaped to self.shape when set, stored as the current
    value unless the node is observed, and returned.
    """
    if not self._random:
        raise AttributeError('Stochastic ' + self.__name__ + ' does not know how to draw its value, see documentation')
    draw = self._random(**self.parents.value)
    if self.shape:
        draw = np.reshape(draw, self.shape)
    if not self.observed:
        self.value = draw
    return draw
Draws a new value for a stoch conditional on its parents and returns it .
31,320
def save_sampler(sampler):
    """Dump a sampler into its hdf5 database as a pickled filenode.

    NOTE(review): pickled data is only safe to reload from trusted files.
    """
    import pickle
    h5file = sampler.db._h5file
    fnode = tables.filenode.newnode(h5file, where='/', name='__sampler__')
    pickle.dump(sampler, fnode)
Dumps a sampler into its hdf5 database .
31,321
def restore_sampler(fname):
    """Create a new sampler from an hdf5 database.

    NOTE(review): the HDF5 file handle is left open, matching the original
    behaviour -- the unpickled sampler may still reference it; confirm
    before adding a close().
    """
    import pickle
    h5 = tables.open_file(fname)
    node = h5.root.__sampler__
    sampler = pickle.load(node)
    return sampler
Creates a new sampler from an hdf5 database .
31,322
def tally(self, chain):
    """Add the current value to the trace row of the given chain."""
    row = self.db._rows[chain]
    row[self.name] = self._getfunc()
Adds current value to trace
31,323
def hdf5_col(self, chain=-1):
    """Return the pytables column object backing this trace."""
    table = self.db._tables[chain]
    return table.colinstances[self.name]
Return a pytables column object .
31,324
def savestate ( self , state , chain = - 1 ) : cur_chain = self . _chains [ chain ] if hasattr ( cur_chain , '_state_' ) : cur_chain . _state_ [ 0 ] = state else : s = self . _h5file . create_vlarray ( cur_chain , '_state_' , tables . ObjectAtom ( ) , title = 'The saved state of the sampler' , filters = self . filter )...
Store a dictionary containing the state of the Model and its StepMethods.
31,325
def _model_trace_description(self):
    """Return a table description of the tallyable functions, plus an
    (empty) dict of ObjectAtoms to be created."""
    descr = {}
    for name, fun in six.iteritems(self.model._funs_to_tally):
        sample = asarray(fun())
        descr[name] = tables.Col.from_dtype(dtype((sample.dtype, sample.shape)))
    return descr, {}
Return a description of the table and the ObjectAtoms to be created .
31,326
def _check_compatibility(self):
    """Make sure the next objects to be tallied are compatible with the
    objects stored in the file."""
    stored_descr = self._file_trace_description()
    try:
        # NOTE(review): _model_trace_description returns (dict, dict);
        # unpacking its tuple into (k, v) looks suspicious -- confirm
        # against the original intent before changing it.
        for k, v in self._model_trace_description():
            assert(stored_descr[k][0] == v[0])
    # A bare `except:` would also mask KeyboardInterrupt/SystemExit.
    except Exception:
        raise ValueError(
            "The objects to tally are incompatible with the objects stored in the file.")
Make sure the next objects to be tallied are compatible with the stored trace .
31,327
def _gettables ( self ) : groups = self . _h5file . list_nodes ( "/" ) if len ( groups ) == 0 : return [ ] else : return [ gr . PyMCsamples for gr in groups if gr . _v_name [ : 5 ] == 'chain' ]
Return a list of hdf5 tables named PyMCsamples.
31,328
def add_attr ( self , name , object , description = '' , chain = - 1 , array = False ) : if not np . isscalar ( chain ) : raise TypeError ( "chain must be a scalar integer." ) table = self . _tables [ chain ] if array is False : table . set_attr ( name , object ) obj = getattr ( table . attrs , name ) else : if descrip...
Add an attribute to the chain .
31,329
def rate(s=switchpoint, e=early_mean, l=late_mean):
    """Concatenate the Poisson means: early mean before the switchpoint,
    late mean from the switchpoint onwards."""
    means = empty(len(disasters_array))
    means[:s] = e
    means[s:] = l
    return means
Concatenate Poisson means
31,330
def regularize_array(A):
    """Coerce input to a float ndarray of shape (n, ndim).

    1-d (or scalar) input becomes a column vector; higher-rank input is
    flattened to rows of its trailing dimension.
    """
    # asarray handles both the list and the ndarray branch identically.
    A = np.asarray(A, dtype=float)
    if A.ndim <= 1:
        return A.reshape(-1, 1)
    if A.shape[-1] > 1:
        return A.reshape(-1, A.shape[-1])
    return A
Takes an np . ndarray as an input .
31,331
def import_item(name):
    """Import a dotted name (e.g. pymc.gp.cov_funs.isotropic_cov_funs)
    and return the leaf object."""
    parts = name.split('.')
    leaf = parts[-1]
    package = '.'.join(parts[:-1])
    if not package:
        return __import__(leaf)
    module = __import__(package, fromlist=[leaf])
    return module.__dict__[leaf]
Useful for importing nested modules such as pymc . gp . cov_funs . isotropic_cov_funs .
31,332
def add_distance_metric ( self , distance_fun_name , distance_fun_module , with_x ) : if self . ampsq_is_diag : kls = covariance_wrapper_with_diag else : kls = covariance_wrapper new_fun = kls ( self . cov_fun_name , self . cov_fun_module , self . extra_cov_params , distance_fun_name , distance_fun_module , with_x = wi...
Takes a function that computes a distance matrix for points in some coordinate system and returns self s covariance function wrapped to use that distance function .
31,333
def func(self, p):
    """The objective passed to the optimizers: negative log-probability
    of the model at parameter vector p (Inf on zero probability)."""
    self._set_stochastics(p)
    try:
        logp = self.logp
    except ZeroProbability:
        return Inf
    return -1. * logp
The function that gets passed to the optimizers .
31,334
def gradfunc(self, p):
    """The gradient-computing function passed to the optimizers if needed.

    Fills self.grad with per-coordinate derivatives and returns its
    negation (optimizers minimize -logp).
    """
    self._set_stochastics(p)
    # range() iterates identically on Python 2 and 3; xrange is Py2-only.
    for i in range(self.len):
        self.grad[i] = self.diff(i)
    return -1 * self.grad
The gradient - computing function that gets passed to the optimizers if needed .
31,335
def i_logp(self, index):
    """Evaluate the log-probability of the Markov blanket of the
    stochastic owning a particular index.

    Returns -Inf on ZeroProbability.
    """
    # (removed: an `all_relevant_stochastics = set()` local that was
    # built but never used)
    p, i = self.stochastic_indices[index]
    try:
        return p.logp + logp_of_set(p.extended_children)
    except ZeroProbability:
        return -Inf
Evaluates the log - probability of the Markov blanket of a stochastic owning a particular index .
31,336
def grad_and_hess(self):
    """Compute self's gradient and Hessian in place.

    Used when the optimization method for a NormApprox (e.g. fmin) does
    not itself use gradients and Hessians.
    """
    # range() works on both Python 2 and 3; xrange is Py2-only.
    for i in range(self.len):
        self.grad[i] = self.diff(i)
        self.hess[i, i] = self.diff(i, 2)
        # Fill the symmetric off-diagonal terms (empty range when i is last).
        for j in range(i + 1, self.len):
            dij = self.diff2(i, j)
            self.hess[i, j] = dij
            self.hess[j, i] = dij
Computes self s gradient and Hessian . Used if the optimization method for a NormApprox doesn t use gradients and hessians for instance fmin .
31,337
def hessfunc ( self , p ) : self . _set_stochastics ( p ) for i in xrange ( self . len ) : di = self . diff ( i ) self . hess [ i , i ] = self . diff ( i , 2 ) if i < self . len - 1 : for j in xrange ( i + 1 , self . len ) : dij = self . diff2 ( i , j ) self . hess [ i , j ] = dij self . hess [ j , i ] = dij return - 1...
The Hessian function that will be passed to the optimizer if needed .
31,338
def makeRequests ( callable_ , args_list , callback = None , exc_callback = _handle_thread_exception ) : requests = [ ] for item in args_list : if isinstance ( item , tuple ) : requests . append ( WorkRequest ( callable_ , item [ 0 ] , item [ 1 ] , callback = callback , exc_callback = exc_callback ) ) else : requests ....
Create several work requests for same callable with different arguments .
31,339
def thread_partition_array ( x ) : "Partition work arrays for multithreaded addition and multiplication" n_threads = get_threadpool_size ( ) if len ( x . shape ) > 1 : maxind = x . shape [ 1 ] else : maxind = x . shape [ 0 ] bounds = np . array ( np . linspace ( 0 , maxind , n_threads + 1 ) , dtype = 'int' ) cmin = bou...
Partition work arrays for multithreaded addition and multiplication
31,340
def run ( self ) : while True : if self . _dismissed . isSet ( ) : break request = self . _requests_queue . get ( ) if self . _dismissed . isSet ( ) : self . _requests_queue . put ( request ) break try : result = request . callable ( * request . args , ** request . kwds ) if request . callback : request . callback ( re...
Repeatedly process the job queue until told to exit .
31,341
def createWorkers(self, num_workers):
    """Add num_workers worker threads to the pool."""
    fresh = [WorkerThread(self._requests_queue) for _ in range(num_workers)]
    self.workers.extend(fresh)
Add num_workers worker threads to the pool .
31,342
def dismissWorkers(self, num_workers):
    """Tell up to num_workers worker threads to quit after their
    current task."""
    count = min(num_workers, len(self.workers))
    for _ in range(count):
        self.workers.pop().dismiss()
Tell num_workers worker threads to quit after their current task .
31,343
def setNumWorkers(self, num_workers):
    """Grow or shrink the pool to exactly num_workers threads."""
    delta = num_workers - len(self.workers)
    if delta < 0:
        self.dismissWorkers(-delta)
    else:
        self.createWorkers(delta)
Set number of worker threads to num_workers
31,344
def putRequest(self, request, block=True, timeout=0):
    """Put a work request into the work queue and remember it by its id
    for later collection."""
    queue = self._requests_queue
    queue.put(request, block, timeout)
    self.workRequests[request.requestID] = request
Put work request into work queue and save its id for later .
31,345
def zip(value=data, mu=mu, psi=psi):
    """Zero-inflated Poisson log-likelihood."""
    total = 0.0
    for obs in value:
        if obs:
            # Count must come from the Poisson component.
            total += np.log(psi) + poisson_like(obs, mu)
        else:
            # A zero is either structural or a Poisson zero.
            total += np.log((1. - psi) + psi * np.exp(-mu))
    return total
Zero - inflated Poisson likelihood
31,346
def plot ( data , name , format = 'png' , suffix = '' , path = './' , common_scale = True , datarange = ( None , None ) , fontmap = None , verbose = 1 , new = True , last = True , rows = 1 , num = 1 ) : if fontmap is None : fontmap = { 1 : 10 , 2 : 8 , 3 : 6 , 4 : 5 , 5 : 4 } if ndim ( data ) == 1 : if verbose > 0 : pr...
Generates summary plots for nodes of a given PyMC object .
31,347
def histogram ( data , name , bins = 'sturges' , datarange = ( None , None ) , format = 'png' , suffix = '' , path = './' , rows = 1 , columns = 1 , num = 1 , last = True , fontmap = None , verbose = 1 ) : try : if fontmap is None : fontmap = { 1 : 10 , 2 : 8 , 3 : 6 , 4 : 5 , 5 : 4 } standalone = rows == 1 and columns...
Generates histogram from an array of data .
31,348
def trace ( data , name , format = 'png' , datarange = ( None , None ) , suffix = '' , path = './' , rows = 1 , columns = 1 , num = 1 , last = True , fontmap = None , verbose = 1 ) : if fontmap is None : fontmap = { 1 : 10 , 2 : 8 , 3 : 6 , 4 : 5 , 5 : 4 } standalone = rows == 1 and columns == 1 and num == 1 if standal...
Generates trace plot from an array of data .
31,349
def gof_plot ( simdata , trueval , name = None , bins = None , format = 'png' , suffix = '-gof' , path = './' , fontmap = None , verbose = 0 ) : if fontmap is None : fontmap = { 1 : 10 , 2 : 8 , 3 : 6 , 4 : 5 , 5 : 4 } if not isinstance ( simdata , ndarray ) : simdata = simdata . trace ( ) if ndim ( trueval ) == 1 and ...
Plots histogram of replicated data indicating the location of the observed data
31,350
def load ( dbname ) : db = Database ( dbname ) tables = get_table_list ( db . cur ) chains = 0 for name in tables : db . _traces [ name ] = Trace ( name = name , db = db ) db . _traces [ name ] . _shape = get_shape ( db . cur , name ) setattr ( db , name , db . _traces [ name ] ) db . cur . execute ( 'SELECT MAX(trace)...
Load an existing SQLite database .
31,351
def get_shape(cursor, name):
    """Return the shape of table `name`, decoded from its last column name."""
    # NOTE(review): the table name is interpolated directly into SQL;
    # only trusted names must be passed here.
    cursor.execute('select * from [%s]' % name)
    last_col = cursor.description[-1][0]
    dims = last_col[1:].split('_')
    return tuple(int(d) for d in dims)
Return the shape of the table name .
31,352
def close(self, *args, **kwds):
    """Close the database: cursor first, then commit, then the DB handle."""
    self.cur.close()
    self.commit()
    self.DB.close()
Close database .
31,353
def create_nonimplemented_method(op_name, klass):
    """Attach a stub special method __<op_name>__ to klass that raises
    NotImplementedError when invoked."""
    def stub(self, *args):
        raise NotImplementedError(
            'Special method %s has not been implemented for PyMC variables.' % op_name)
    stub.__name__ = '__' + op_name + '__'
    setattr(klass, stub.__name__, UnboundMethodType(stub, None, klass))
Creates a new method that raises NotImplementedError .
31,354
def remove_step_method ( self , step_method ) : try : for s in step_method . stochastics : self . step_method_dict [ s ] . remove ( step_method ) if hasattr ( self , "step_methods" ) : self . step_methods . discard ( step_method ) self . _sm_assigned = False except AttributeError : for sm in step_method : self . remove...
Removes a step method .
31,355
def assign_step_methods ( self , verbose = - 1 , draw_from_prior_when_possible = True ) : if not self . _sm_assigned : if draw_from_prior_when_possible : last_gen = set ( [ ] ) for s in self . stochastics - self . observed_stochastics : if s . _random is not None : if len ( s . extended_children ) == 0 : last_gen . add...
Make sure every stochastic variable has a step method . If not assign a step method from the registry .
31,356
def tune ( self ) : if self . verbose > 0 : print_ ( '\tTuning at iteration' , self . _current_iter ) tuning_count = 0 for step_method in self . step_methods : verbose = self . verbose if step_method . verbose > - 1 : verbose = step_method . verbose tuning_count += step_method . tune ( verbose = self . verbose ) if ver...
Tell all step methods to tune themselves .
31,357
def get_state ( self ) : self . step_methods = set ( ) for s in self . stochastics : self . step_methods |= set ( self . step_method_dict [ s ] ) state = Sampler . get_state ( self ) state [ 'step_methods' ] = { } for sm in self . step_methods : state [ 'step_methods' ] [ sm . _id ] = sm . current_state ( ) . copy ( ) ...
Return the sampler and step methods current state in order to restart sampling at a later time .
31,358
def _calc_dic ( self ) : mean_deviance = np . mean ( self . db . trace ( 'deviance' ) ( ) , axis = 0 ) for stochastic in self . stochastics : try : mean_value = np . mean ( self . db . trace ( stochastic . __name__ ) ( ) , axis = 0 ) stochastic . value = mean_value except KeyError : print_ ( "No trace available for %s....
Calculates deviance information Criterion
31,359
def stochastic_from_data ( name , data , lower = - np . inf , upper = np . inf , value = None , observed = False , trace = True , verbose = - 1 , debug = False ) : pdf = gaussian_kde ( data ) lower_tail = upper_tail = 0. if lower > - np . inf : lower_tail = pdf . integrate_box ( - np . inf , lower ) if upper < np . inf...
Return a Stochastic subclass made from arbitrary data .
31,360
def randomwrap ( func ) : refargs , defaults = utils . get_signature ( func ) npos = len ( refargs ) - len ( defaults ) nkwds = len ( defaults ) mv = func . __name__ [ 1 : ] in mv_continuous_distributions + mv_discrete_distributions if not mv : return func def wrapper ( * args , ** kwds ) : n = len ( args ) if nkwds > ...
Decorator for random value generators
31,361
def constrain(value, lower=-np.Inf, upper=np.Inf, allow_equal=False):
    """Apply an interval constraint on a stochastic value; raise
    ZeroProbability when the constraint is violated."""
    inside = flib.constrain(value, lower, upper, allow_equal)
    if inside == 0:
        raise ZeroProbability
Apply interval constraint on stochastic value .
31,362
def expand_triangular(X, k):
    """Expand a flattened upper-triangular matrix into a full symmetric
    k-by-k matrix.

    :Parameters:
      X : ndarray of the k*(k+1)/2 upper-triangular entries, row by row.
      k : matrix dimension.
    """
    X = X.tolist()
    # Integer division: `/` would yield float slice indices on Python 3.
    Y = np.asarray([[0] * i + X[i * k - (i * (i - 1)) // 2: i * k + (k - i)]
                    for i in range(k)])
    # Mirror the upper triangle into the lower one.
    for i in range(k):
        for j in range(k):
            Y[j, i] = Y[i, j]
    return Y
Expand flattened triangular matrix .
31,363
def rarlognormal ( a , sigma , rho , size = 1 ) : R f = utils . ar1 if np . isscalar ( a ) : r = f ( rho , 0 , sigma , size ) else : n = len ( a ) r = [ f ( rho , 0 , sigma , n ) for i in range ( size ) ] if size == 1 : r = r [ 0 ] return a * np . exp ( r )
R Autoregressive normal random variates .
31,364
def arlognormal_like(x, a, sigma, rho):
    """Autoregressive lognormal log-likelihood."""
    return flib.arlognormal(x, np.log(a), sigma, rho, beta=1)
R Autoregressive lognormal log - likelihood .
31,365
def rbeta(alpha, beta, size=None):
    """Random beta variates, drawn via the inverse CDF of scipy's beta."""
    from scipy.stats.distributions import beta as beta_dist
    u = np.random.random(size)
    return beta_dist.ppf(u, alpha, beta)
Random beta variates .
31,366
def rbinomial(n, p, size=None):
    """Random binomial variates."""
    # Falsy sizes (0, None, ()) are treated as "no size argument".
    size = size or None
    return np.random.binomial(np.ravel(n), np.ravel(p), size)
Random binomial variates .
31,367
def rbetabin(alpha, beta, n, size=None):
    """Random beta-binomial variates: draw success probabilities from a
    beta, then counts from a binomial."""
    success_prob = np.random.beta(alpha, beta, size)
    return np.random.binomial(n, success_prob)
Random beta - binomial variates .
31,368
def rcategorical(p, size=None):
    """Categorical random variates."""
    draws = flib.rcat(p, np.random.random(size=size))
    # Collapse single-element results to a scalar.
    return draws.squeeze() if sum(draws.shape) == 1 else draws
Categorical random variates .
31,369
def categorical_like(x, p):
    """Categorical log-likelihood: the most general discrete distribution."""
    p = np.atleast_2d(p)
    # Warn (but do not fail) when the rows of p are not normalized.
    if np.any(abs(np.sum(p, 1) - 1) > 0.0001):
        print_("Probabilities in categorical_like sum to", np.sum(p, 1))
    return flib.categorical(np.array(x).astype(int), p)
R Categorical log - likelihood . The most general discrete distribution .
31,370
def rcauchy(alpha, beta, size=None):
    """Cauchy random variates via the inverse CDF of uniform draws."""
    uniform = random_number(size)
    return alpha + beta * np.tan(pi * uniform - pi / 2.0)
Returns Cauchy random variates .
31,371
def degenerate_like(x, k):
    """Degenerate (point-mass) log-likelihood: 0 when every element of x
    equals k, -inf otherwise."""
    x = np.atleast_1d(x)
    # log(True)=0 for matches, log(False)=-inf for any mismatch.
    return sum(np.log([v == k for v in x]))
R Degenerate log - likelihood .
31,372
def rdirichlet(theta, size=1):
    """Dirichlet random variates (only the first k-1 coordinates are
    returned; the last is implied by the simplex constraint)."""
    # range() works on both Python 2 and 3; xrange is Py2-only.
    gammas = np.vstack([rgamma(theta, 1) for _ in range(size)])
    if size > 1 and np.size(theta) > 1:
        return (gammas.T / gammas.sum(1))[:-1].T
    elif np.size(theta) > 1:
        return (gammas[0] / gammas[0].sum())[:-1]
    else:
        return 1.
Dirichlet random variates .
31,373
def dirichlet_like(x, theta):
    """Dirichlet log-likelihood; x holds only the first k-1 coordinates."""
    x = np.atleast_2d(x)
    theta = np.atleast_2d(theta)
    if (np.shape(x)[-1] + 1) != np.shape(theta)[-1]:
        raise ValueError('The dimension of x in dirichlet_like must be k-1.')
    return flib.dirichlet(x, theta)
R Dirichlet log - likelihood .
31,374
def rexponweib(alpha, k, loc=0, scale=1, size=None):
    """Random exponentiated-Weibull variates via the inverse CDF."""
    quantiles = np.random.uniform(size=size)
    standardized = flib.exponweib_ppf(quantiles, alpha, k)
    return loc + standardized * scale
Random exponentiated Weibull variates .
31,375
def exponweib_like(x, alpha, k, loc=0, scale=1):
    """Exponentiated Weibull log-likelihood."""
    return flib.exponweib(x, alpha, k, loc, scale)
R Exponentiated Weibull log - likelihood .
31,376
def rgamma(alpha, beta, size=None):
    """Random gamma variates with shape alpha and rate beta."""
    scale = 1. / beta  # numpy parameterizes by scale, not rate
    return np.random.gamma(shape=alpha, scale=scale, size=size)
Random gamma variates .
31,377
def gev_expval(xi, mu=0, sigma=1):
    """Expected value of the generalized extreme value distribution."""
    ratio = sigma / xi
    return mu - ratio + ratio * flib.gamfun(1 - xi)
Expected value of generalized extreme value distribution .
31,378
def gev_like(x, xi, mu=0, sigma=1):
    """Generalized extreme value log-likelihood."""
    return flib.gev(x, xi, mu, sigma)
R Generalized Extreme Value log - likelihood
31,379
def rhalf_cauchy(alpha, beta, size=None):
    """Half-Cauchy random variates: absolute value of Cauchy draws."""
    draws = alpha + beta * np.tan(pi * random_number(size) - pi / 2.0)
    return abs(draws)
Returns half - Cauchy random variates .
31,380
def half_cauchy_like(x, alpha, beta):
    """Half-Cauchy log-likelihood (|Cauchy|); support is x >= 0."""
    x = np.atleast_1d(x)
    if sum(x.ravel() < 0):
        return -inf
    # Folding the density doubles it: add len(x) * log(2).
    return flib.cauchy(x, alpha, beta) + len(x) * np.log(2)
R Half - Cauchy log - likelihood . Simply the absolute value of Cauchy .
31,381
def rhalf_normal(tau, size=None):
    """Random half-normal variates with precision tau."""
    sd = np.sqrt(1 / tau)
    return abs(np.random.normal(0, sd, size))
Random half - normal variates .
31,382
def rhypergeometric(n, m, N, size=None):
    """Hypergeometric random variates.

    Degenerate cases (no good elements, all good elements) are
    short-circuited without consuming random numbers.
    """
    if n == 0:
        return np.zeros(size, dtype=int)
    if n == N:
        filled = np.empty(size, dtype=int)
        filled.fill(m)
        return filled
    return np.random.hypergeometric(n, N - n, m, size)
Returns hypergeometric random variates .
31,383
def hypergeometric_like(x, n, m, N):
    """Hypergeometric log-likelihood."""
    return flib.hyperg(x, n, m, N)
R Hypergeometric log - likelihood .
31,384
def rlogistic(mu, tau, size=None):
    """Logistic random variates via the inverse CDF (logit of uniforms)."""
    draws = np.random.random(size)
    return mu + np.log(draws / (1 - draws)) / tau
Logistic random variates .
31,385
def rlognormal(mu, tau, size=None):
    """Random lognormal variates with log-scale precision tau."""
    sigma = np.sqrt(1. / tau)
    return np.random.lognormal(mu, sigma, size)
Return random lognormal variates .
31,386
def rmultinomial ( n , p , size = None ) : if len ( np . shape ( p ) ) == 1 : return np . random . multinomial ( n , p , size ) if np . isscalar ( n ) : n = n * np . ones ( np . shape ( p ) [ 0 ] , dtype = np . int ) out = np . empty ( np . shape ( p ) ) for i in xrange ( np . shape ( p ) [ 0 ] ) : out [ i , : ] = np ....
Random multinomial variates .
31,387
def multinomial_like(x, n, p):
    """Multinomial log-likelihood."""
    x = np.atleast_2d(x)
    p = np.atleast_2d(p)
    return flib.multinomial(x, n, p)
R Multinomial log - likelihood .
31,388
def rmultivariate_hypergeometric ( n , m , size = None ) : N = len ( m ) urn = np . repeat ( np . arange ( N ) , m ) if size : draw = np . array ( [ [ urn [ i ] for i in np . random . permutation ( len ( urn ) ) [ : n ] ] for j in range ( size ) ] ) r = [ [ np . sum ( draw [ j ] == i ) for i in range ( len ( m ) ) ] fo...
Random multivariate hypergeometric variates .
31,389
def multivariate_hypergeometric_expval(n, m):
    """Expected value of the multivariate hypergeometric distribution:
    n draws spread proportionally to the category counts m."""
    m = np.asarray(m, float)
    return n * (m / m.sum())
Expected value of multivariate hypergeometric distribution .
31,390
def mv_normal_like(x, mu, tau):
    """Multivariate normal log-likelihood (precision parameterization)."""
    if len(np.shape(x)) > 1:
        return np.sum([flib.prec_mvnorm(row, mu, tau) for row in x])
    return flib.prec_mvnorm(x, mu, tau)
R Multivariate normal log - likelihood
31,391
def mv_normal_cov_like(x, mu, C):
    """Multivariate normal log-likelihood parameterized by a covariance
    matrix."""
    if len(np.shape(x)) > 1:
        return np.sum([flib.cov_mvnorm(row, mu, C) for row in x])
    return flib.cov_mvnorm(x, mu, C)
R Multivariate normal log - likelihood parameterized by a covariance matrix .
31,392
def mv_normal_chol_like(x, mu, sig):
    """Multivariate normal log-likelihood parameterized by a Cholesky
    factor."""
    if len(np.shape(x)) > 1:
        return np.sum([flib.chol_mvnorm(row, mu, sig) for row in x])
    return flib.chol_mvnorm(x, mu, sig)
R Multivariate normal log - likelihood .
31,393
def rnegative_binomial(mu, alpha, size=None):
    """Random negative binomial variates, drawn as a gamma-mixed Poisson."""
    mu = np.asarray(mu, dtype=float)
    pois_means = np.random.gamma(alpha, mu / alpha, size)
    return np.random.poisson(pois_means, size)
Random negative binomial variates .
31,394
def negative_binomial_like ( x , mu , alpha ) : R alpha = np . array ( alpha ) if ( alpha > 1e10 ) . any ( ) : if ( alpha > 1e10 ) . all ( ) : return flib . poisson ( x , mu ) big = alpha > 1e10 return flib . poisson ( x [ big ] , mu [ big ] ) + flib . negbin2 ( x [ big - True ] , mu [ big - True ] , alpha [ big - True...
R Negative binomial log - likelihood .
31,395
def rnormal(mu, tau, size=None):
    """Random normal variates with precision tau."""
    sd = 1. / np.sqrt(tau)
    return np.random.normal(mu, sd, size)
Random normal variates .
31,396
def rvon_mises(mu, kappa, size=None):
    """Random von Mises variates wrapped into [-pi, pi)."""
    raw = np.random.mtrand.vonmises(mu, kappa, size)
    return (raw + np.pi) % (2. * np.pi) - np.pi
Random von Mises variates .
31,397
def rtruncated_pareto(alpha, m, b, size=None):
    """Random bounded (truncated) Pareto variates via the inverse CDF."""
    u = random_number(size)
    numerator = -(u * b ** alpha - u * m ** alpha - b ** alpha)
    return (numerator / (b ** alpha * m ** alpha)) ** (-1. / alpha)
Random bounded Pareto variates .
31,398
def truncated_pareto_expval(alpha, m, b):
    """Expected value of the truncated Pareto distribution.

    Infinite when alpha <= 1 (the mean does not exist).
    """
    if alpha <= 1:
        return inf
    normalizer = (m ** alpha) / (1. - (m / b) ** alpha)
    slope = 1. * alpha / (alpha - 1)
    tail = (1. / (m ** (alpha - 1)) - 1. / (b ** (alpha - 1.)))
    return normalizer * slope * tail
Expected value of truncated Pareto distribution .
31,399
def rtruncated_poisson ( mu , k , size = None ) : try : m = max ( 0 , np . floor ( k - mu ) ) except ( TypeError , ValueError ) : return np . array ( [ rtruncated_poisson ( x , i ) for x , i in zip ( mu , np . resize ( k , np . size ( mu ) ) ) ] ) . squeeze ( ) k -= 1 C = np . exp ( flib . factln ( k + 1 ) - flib . fac...
Random truncated Poisson variates with minimum value k generated using rejection sampling .