Dataset columns: idx (int64, 0 to 251k); question (string, length 53 to 3.53k); target (string, length 5 to 1.23k); len_question (int64, 20 to 893); len_target (int64, 3 to 238).
243,700
def _tabulate(self, info_dict, col_widths, changed, order, bounds_dict):
    contents, tail = [], []
    column_set = set(k for row in info_dict.values() for k in row)
    columns = [col for col in order if col in column_set]
    title_row = []
    # Generate the column headings
    for i, col in enumerate(columns):
        width = col_widths[col] + 2
        col = col.capitalize()
        formatted = col.ljust(width) if i == 0 else col.center(width)
        title_row.append(formatted)
    contents.append(blue % ''.join(title_row) + "\n")
    # Format the table rows
    for row in sorted(info_dict):
        row_list = []
        info = info_dict[row]
        for i, col in enumerate(columns):
            width = col_widths[col] + 2
            val = info[col] if (col in info) else ''
            formatted = val.ljust(width) if i == 0 else val.center(width)
            if col == 'bounds' and bounds_dict.get(row, False):
                (mark_lbound, mark_ubound) = bounds_dict[row]
                lval, uval = formatted.rsplit(',')
                lspace, lstr = lval.rsplit('(')
                ustr, uspace = uval.rsplit(')')
                lbound = lspace + '(' + (cyan % lstr) if mark_lbound else lval
                ubound = (cyan % ustr) + ')' + uspace if mark_ubound else uval
                formatted = "%s,%s" % (lbound, ubound)
            row_list.append(formatted)
        row_text = ''.join(row_list)
        if row in changed:
            row_text = red % row_text
        contents.append(row_text)
    return '\n'.join(contents + tail)
Returns the supplied information as a table suitable for printing or paging .
442
14
243,701
def is_ordered_dict(d):
    py3_ordered_dicts = (sys.version_info.major == 3) and (sys.version_info.minor >= 6)
    vanilla_odicts = (sys.version_info.major > 3) or py3_ordered_dicts
    return isinstance(d, (OrderedDict)) or (vanilla_odicts and isinstance(d, dict))
Predicate checking for ordered dictionaries. OrderedDict is always ordered, and vanilla Python dictionaries are ordered for Python 3.6+.
93
28
243,702
def named_objs(objlist, namesdict=None):
    objs = OrderedDict()
    if namesdict is not None:
        objtoname = {hashable(v): k for k, v in namesdict.items()}
    for obj in objlist:
        if namesdict is not None and hashable(obj) in objtoname:
            k = objtoname[hashable(obj)]
        elif hasattr(obj, "name"):
            k = obj.name
        elif hasattr(obj, '__name__'):
            k = obj.__name__
        else:
            k = as_unicode(obj)
        objs[k] = obj
    return objs
Given a list of objects, returns a dictionary mapping from a string name for the object to the object itself. Accepts an optional name,obj dictionary, which will override any other name if that item is present in the dictionary.
149
42
243,703
def guess_param_types(**kwargs):
    params = {}
    for k, v in kwargs.items():
        kws = dict(default=v, constant=True)
        if isinstance(v, Parameter):
            params[k] = v
        elif isinstance(v, dt_types):
            params[k] = Date(**kws)
        elif isinstance(v, bool):
            params[k] = Boolean(**kws)
        elif isinstance(v, int):
            params[k] = Integer(**kws)
        elif isinstance(v, float):
            params[k] = Number(**kws)
        elif isinstance(v, str):
            params[k] = String(**kws)
        elif isinstance(v, dict):
            params[k] = Dict(**kws)
        elif isinstance(v, tuple):
            if all(_is_number(el) for el in v):
                params[k] = NumericTuple(**kws)
            elif all(isinstance(el, dt_types) for el in v) and len(v) == 2:
                params[k] = DateRange(**kws)
            else:
                params[k] = Tuple(**kws)
        elif isinstance(v, list):
            params[k] = List(**kws)
        elif isinstance(v, np.ndarray):
            params[k] = Array(**kws)
        else:
            from pandas import DataFrame as pdDFrame
            from pandas import Series as pdSeries
            if isinstance(v, pdDFrame):
                params[k] = DataFrame(**kws)
            elif isinstance(v, pdSeries):
                params[k] = Series(**kws)
            else:
                params[k] = Parameter(**kws)
    return params
Given a set of keyword literals, promote them to the appropriate parameter type based on some simple heuristics.
428
21
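A quick usage sketch of the heuristic above (hypothetical example; assumes guess_param_types is exposed at the top level of the param package, as it is in recent versions):

import param

# Each literal is promoted to the matching Parameter type, with the
# literal as its constant default.
guessed = param.guess_param_types(count=3, ratio=0.5, label="x", flag=True)
# count -> Integer(default=3),  ratio -> Number(default=0.5)
# label -> String(default="x"), flag  -> Boolean(default=True)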
243,704
def guess_bounds(params, **overrides):
    guessed = {}
    for name, p in params.items():
        new_param = copy.copy(p)
        if isinstance(p, (Integer, Number)):
            if name in overrides:
                minv, maxv = overrides[name]
            else:
                minv, maxv, _ = _get_min_max_value(None, None, value=p.default)
            new_param.bounds = (minv, maxv)
        guessed[name] = new_param
    return guessed
Given a dictionary of Parameter instances return a corresponding set of copies with the bounds appropriately set .
121
19
243,705
def _initialize_generator ( self , gen , obj = None ) : # CEBALERT: use a dictionary to hold these things. if hasattr ( obj , "_Dynamic_time_fn" ) : gen . _Dynamic_time_fn = obj . _Dynamic_time_fn gen . _Dynamic_last = None # CEB: I'd use None for this, except can't compare a fixedpoint # number with None (e.g. 1>None but FixedPoint(1)>None can't be done) gen . _Dynamic_time = - 1 gen . _saved_Dynamic_last = [ ] gen . _saved_Dynamic_time = [ ]
Add last time and last value attributes to the generator .
147
11
243,706
def _produce_value ( self , gen , force = False ) : if hasattr ( gen , "_Dynamic_time_fn" ) : time_fn = gen . _Dynamic_time_fn else : time_fn = self . time_fn if ( time_fn is None ) or ( not self . time_dependent ) : value = produce_value ( gen ) gen . _Dynamic_last = value else : time = time_fn ( ) if force or time != gen . _Dynamic_time : value = produce_value ( gen ) gen . _Dynamic_last = value gen . _Dynamic_time = time else : value = gen . _Dynamic_last return value
Return a value from gen .
145
6
243,707
def _inspect ( self , obj , objtype = None ) : gen = super ( Dynamic , self ) . __get__ ( obj , objtype ) if hasattr ( gen , '_Dynamic_last' ) : return gen . _Dynamic_last else : return gen
Return the last generated value for this parameter .
58
9
243,708
def _force ( self , obj , objtype = None ) : gen = super ( Dynamic , self ) . __get__ ( obj , objtype ) if hasattr ( gen , '_Dynamic_last' ) : return self . _produce_value ( gen , force = True ) else : return gen
Force a new value to be generated and return it .
65
11
243,709
def set_in_bounds(self, obj, val):
    if not callable(val):
        bounded_val = self.crop_to_bounds(val)
    else:
        bounded_val = val
    super(Number, self).__set__(obj, bounded_val)
Set to the given value but cropped to be within the legal bounds . All objects are accepted and no exceptions will be raised . See crop_to_bounds for details on how cropping is done .
62
41
243,710
def crop_to_bounds(self, val):
    # Currently, values outside the bounds are silently cropped to
    # be inside the bounds; it may be appropriate to add a warning
    # in such cases.
    if _is_number(val):
        if self.bounds is None:
            return val
        vmin, vmax = self.bounds
        if vmin is not None:
            if val < vmin:
                return vmin
        if vmax is not None:
            if val > vmax:
                return vmax
    elif self.allow_None and val is None:
        return val
    else:
        # non-numeric value sent in: reverts to default value
        return self.default
    return val
Return the given value cropped to be within the hard bounds for this parameter .
138
15
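For illustration, how the cropping above behaves (a sketch with hypothetical values; assumes a param.Number with hard bounds already set):

from param import Number

# A hypothetical Number parameter with bounds (0, 10) and default 5.
p = Number(default=5, bounds=(0, 10))
p.crop_to_bounds(12)     # -> 10  (cropped to the upper bound)
p.crop_to_bounds(-3)     # -> 0   (cropped to the lower bound)
p.crop_to_bounds(7)      # -> 7   (already within bounds)
p.crop_to_bounds("oops") # -> 5   (non-numeric values revert to the default)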
243,711
def _validate ( self , val ) : if not self . check_on_set : self . _ensure_value_is_in_objects ( val ) return if not ( val in self . objects or ( self . allow_None and val is None ) ) : # CEBALERT: can be called before __init__ has called # super's __init__, i.e. before attrib_name has been set. try : attrib_name = self . name except AttributeError : attrib_name = "" items = [ ] limiter = ']' length = 0 for item in self . objects : string = str ( item ) length += len ( string ) if length < 200 : items . append ( string ) else : limiter = ', ...]' break items = '[' + ', ' . join ( items ) + limiter raise ValueError ( "%s not in Parameter %s's list of possible objects, " "valid options include %s" % ( val , attrib_name , items ) )
val must be None or one of the objects in self.objects.
219
14
243,712
def _ensure_value_is_in_objects(self, val):
    if not (val in self.objects):
        self.objects.append(val)
Make sure that the provided value is present on the objects list . Subclasses can override if they support multiple items on a list to check each item instead .
36
31
243,713
def _validate ( self , val ) : if isinstance ( self . class_ , tuple ) : class_name = ( '(%s)' % ', ' . join ( cl . __name__ for cl in self . class_ ) ) else : class_name = self . class_ . __name__ if self . is_instance : if not ( isinstance ( val , self . class_ ) ) and not ( val is None and self . allow_None ) : raise ValueError ( "Parameter '%s' value must be an instance of %s, not '%s'" % ( self . name , class_name , val ) ) else : if not ( val is None and self . allow_None ) and not ( issubclass ( val , self . class_ ) ) : raise ValueError ( "Parameter '%s' must be a subclass of %s, not '%s'" % ( val . __name__ , class_name , val . __class__ . __name__ ) )
val must be None, an instance of self.class_ if self.is_instance=True, or a subclass of self.class_ if self.is_instance=False.
214
34
243,714
def get_range ( self ) : classes = concrete_descendents ( self . class_ ) d = OrderedDict ( ( name , class_ ) for name , class_ in classes . items ( ) ) if self . allow_None : d [ 'None' ] = None return d
Return the possible types for this parameter's value.
63
10
243,715
def _validate ( self , val ) : if self . allow_None and val is None : return if not isinstance ( val , list ) : raise ValueError ( "List '%s' must be a list." % ( self . name ) ) if self . bounds is not None : min_length , max_length = self . bounds l = len ( val ) if min_length is not None and max_length is not None : if not ( min_length <= l <= max_length ) : raise ValueError ( "%s: list length must be between %s and %s (inclusive)" % ( self . name , min_length , max_length ) ) elif min_length is not None : if not min_length <= l : raise ValueError ( "%s: list length must be at least %s." % ( self . name , min_length ) ) elif max_length is not None : if not l <= max_length : raise ValueError ( "%s: list length must be at most %s." % ( self . name , max_length ) ) self . _check_type ( val )
Checks that the list is of the right length and has the right contents . Otherwise an exception is raised .
241
22
243,716
def logging_level ( level ) : level = level . upper ( ) levels = [ DEBUG , INFO , WARNING , ERROR , CRITICAL , VERBOSE ] level_names = [ 'DEBUG' , 'INFO' , 'WARNING' , 'ERROR' , 'CRITICAL' , 'VERBOSE' ] if level not in level_names : raise Exception ( "Level %r not in %r" % ( level , levels ) ) param_logger = get_logger ( ) logging_level = param_logger . getEffectiveLevel ( ) param_logger . setLevel ( levels [ level_names . index ( level ) ] ) try : yield None finally : param_logger . setLevel ( logging_level )
Temporarily modify param's logging level.
157
9
243,717
def batch_watch ( parameterized , run = True ) : BATCH_WATCH = parameterized . param . _BATCH_WATCH parameterized . param . _BATCH_WATCH = True try : yield finally : parameterized . param . _BATCH_WATCH = BATCH_WATCH if run and not BATCH_WATCH : parameterized . param . _batch_call_watchers ( )
Context manager to batch watcher events on a parameterized object. The context manager will queue any events triggered by setting a parameter on the supplied parameterized object and dispatch them all at once when the context manager exits. If run=False, the queued events are not dispatched and should be processed manually.
83
60
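A minimal sketch of how the context manager above is typically used (hypothetical Parameterized class; assumes batch_watch is importable from param.parameterized):

import param
from param.parameterized import batch_watch

class Example(param.Parameterized):
    a = param.Number(default=0)
    b = param.Number(default=0)

inst = Example()
inst.param.watch(lambda *events: print(len(events), "events"), ['a', 'b'])

# Both assignments are queued and dispatched together on exit,
# so the watcher above fires once with two events.
with batch_watch(inst):
    inst.a = 1
    inst.b = 2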
243,718
def get_all_slots(class_):
    # A subclass's __slots__ attribute does not contain slots defined
    # in its superclass (the superclass' __slots__ end up as
    # attributes of the subclass).
    all_slots = []
    parent_param_classes = [c for c in classlist(class_)[1::]]
    for c in parent_param_classes:
        if hasattr(c, '__slots__'):
            all_slots += c.__slots__
    return all_slots
Return a list of slot names for slots defined in class_ and its superclasses .
118
17
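A small sketch of the slot-collection behaviour (hypothetical classes; assumes get_all_slots is importable from param.parameterized):

from param.parameterized import get_all_slots

class Base(object):
    __slots__ = ['x']

class Child(Base):
    __slots__ = ['y']

# Slots defined anywhere in the hierarchy are collected:
print(get_all_slots(Child))  # expected: ['x', 'y']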
243,719
def get_occupied_slots(instance):
    return [slot for slot in get_all_slots(type(instance))
            if hasattr(instance, slot)]
Return a list of slots for which values have been set .
38
12
243,720
def all_equal(arg1, arg2):
    if all(hasattr(el, '_infinitely_iterable') for el in [arg1, arg2]):
        return arg1 == arg2
    try:
        return all(a1 == a2 for a1, a2 in zip(arg1, arg2))
    except TypeError:
        return arg1 == arg2
Return a single boolean for arg1 == arg2 even for numpy arrays using element - wise comparison .
83
21
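A short illustration of why element-wise comparison matters for numpy arrays (a sketch; assumes all_equal is importable from param.parameterized):

import numpy as np
from param.parameterized import all_equal

a = np.array([1, 2, 3])
b = np.array([1, 2, 3])

# Plain `a == b` yields an array, which is ambiguous in a boolean context;
# all_equal reduces the element-wise comparison to a single bool.
all_equal(a, b)                    # -> True
all_equal(a, np.array([1, 2, 4]))  # -> False
all_equal(3, 3)                    # -> True (falls back to == for non-iterables)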
243,721
def output ( func , * output , * * kw ) : if output : outputs = [ ] for i , out in enumerate ( output ) : i = i if len ( output ) > 1 else None if isinstance ( out , tuple ) and len ( out ) == 2 and isinstance ( out [ 0 ] , str ) : outputs . append ( out + ( i , ) ) elif isinstance ( out , str ) : outputs . append ( ( out , Parameter ( ) , i ) ) else : outputs . append ( ( None , out , i ) ) elif kw : py_major = sys . version_info . major py_minor = sys . version_info . minor if ( py_major < 3 or ( py_major == 3 and py_minor < 6 ) ) and len ( kw ) > 1 : raise ValueError ( 'Multiple output declaration using keywords ' 'only supported in Python >= 3.6.' ) # (requires keywords to be kept ordered, which was not true in previous versions) outputs = [ ( name , otype , i if len ( kw ) > 1 else None ) for i , ( name , otype ) in enumerate ( kw . items ( ) ) ] else : outputs = [ ( None , Parameter ( ) , None ) ] names , processed = [ ] , [ ] for name , otype , i in outputs : if isinstance ( otype , type ) : if issubclass ( otype , Parameter ) : otype = otype ( ) else : from . import ClassSelector otype = ClassSelector ( class_ = otype ) elif isinstance ( otype , tuple ) and all ( isinstance ( t , type ) for t in otype ) : from . import ClassSelector otype = ClassSelector ( class_ = otype ) if not isinstance ( otype , Parameter ) : raise ValueError ( 'output type must be declared with a Parameter class, ' 'instance or a Python object type.' ) processed . append ( ( name , otype , i ) ) names . append ( name ) if len ( set ( names ) ) != len ( names ) : raise ValueError ( 'When declaring multiple outputs each value ' 'must be unique.' ) _dinfo = getattr ( func , '_dinfo' , { } ) _dinfo . update ( { 'outputs' : processed } ) @ wraps ( func ) def _output ( * args , * * kw ) : return func ( * args , * * kw ) _output . _dinfo = _dinfo return _output
output allows annotating a method on a Parameterized class to declare that it returns an output of a specific type. The outputs of a Parameterized class can be queried using the Parameterized.param.outputs method. By default the output will inherit the method name, but a custom name can be declared by expressing the Parameter type using a keyword argument. Declaring multiple return types using keywords is only supported in Python >= 3.6.
557
92
243,722
def _setup_params ( self_ , * * params ) : self = self_ . param . self ## Deepcopy all 'instantiate=True' parameters # (build a set of names first to avoid redundantly instantiating # a later-overridden parent class's parameter) params_to_instantiate = { } for class_ in classlist ( type ( self ) ) : if not issubclass ( class_ , Parameterized ) : continue for ( k , v ) in class_ . __dict__ . items ( ) : # (avoid replacing name with the default of None) if isinstance ( v , Parameter ) and v . instantiate and k != "name" : params_to_instantiate [ k ] = v for p in params_to_instantiate . values ( ) : self . param . _instantiate_param ( p ) ## keyword arg setting for name , val in params . items ( ) : desc = self . __class__ . get_param_descriptor ( name ) [ 0 ] # pylint: disable-msg=E1101 if not desc : self . param . warning ( "Setting non-parameter attribute %s=%s using a mechanism intended only for parameters" , name , val ) # i.e. if not desc it's setting an attribute in __dict__, not a Parameter setattr ( self , name , val )
Initialize default and keyword parameter values .
301
8
243,723
def deprecate ( cls , fn ) : def inner ( * args , * * kwargs ) : if cls . _disable_stubs : raise AssertionError ( 'Stubs supporting old API disabled' ) elif cls . _disable_stubs is None : pass elif cls . _disable_stubs is False : get_logger ( name = args [ 0 ] . __class__ . __name__ ) . log ( WARNING , 'Use method %r via param namespace ' % fn . __name__ ) return fn ( * args , * * kwargs ) inner . __doc__ = "Inspect .param.%s method for the full docstring" % fn . __name__ return inner
Decorator to issue warnings for API moving onto the param namespace and to add a docstring directing people to the appropriate method .
158
26
243,724
def print_param_defaults(self_):
    cls = self_.cls
    for key, val in cls.__dict__.items():
        if isinstance(val, Parameter):
            print(cls.__name__ + '.' + key + '=' + repr(val.default))
Print the default values of all of cls's Parameters.
70
11
243,725
def set_default(self_, param_name, value):
    cls = self_.cls
    setattr(cls, param_name, value)
Set the default value of param_name .
35
9
243,726
def _add_parameter ( self_ , param_name , param_obj ) : # CEBALERT: can't we just do # setattr(cls,param_name,param_obj)? The metaclass's # __setattr__ is actually written to handle that. (Would also # need to do something about the params() cache. That cache # is a pain, but it definitely improved the startup time; it # would be worthwhile making sure no method except for one # "add_param()" method has to deal with it (plus any future # remove_param() method.) cls = self_ . cls type . __setattr__ ( cls , param_name , param_obj ) ParameterizedMetaclass . _initialize_parameter ( cls , param_name , param_obj ) # delete cached params() try : delattr ( cls , '_%s__params' % cls . __name__ ) except AttributeError : pass
Add a new Parameter object into this object's class.
212
12
243,727
def set_param ( self_ , * args , * * kwargs ) : BATCH_WATCH = self_ . self_or_cls . param . _BATCH_WATCH self_ . self_or_cls . param . _BATCH_WATCH = True self_or_cls = self_ . self_or_cls if args : if len ( args ) == 2 and not args [ 0 ] in kwargs and not kwargs : kwargs [ args [ 0 ] ] = args [ 1 ] else : self_ . self_or_cls . param . _BATCH_WATCH = False raise ValueError ( "Invalid positional arguments for %s.set_param" % ( self_or_cls . name ) ) for ( k , v ) in kwargs . items ( ) : if k not in self_or_cls . param : self_ . self_or_cls . param . _BATCH_WATCH = False raise ValueError ( "'%s' is not a parameter of %s" % ( k , self_or_cls . name ) ) try : setattr ( self_or_cls , k , v ) except : self_ . self_or_cls . param . _BATCH_WATCH = False raise self_ . self_or_cls . param . _BATCH_WATCH = BATCH_WATCH if not BATCH_WATCH : self_ . _batch_call_watchers ( )
For each param = value keyword argument sets the corresponding parameter of this object or class to the given value .
320
21
243,728
def objects ( self_ , instance = True ) : cls = self_ . cls # CB: we cache the parameters because this method is called often, # and parameters are rarely added (and cannot be deleted) try : pdict = getattr ( cls , '_%s__params' % cls . __name__ ) except AttributeError : paramdict = { } for class_ in classlist ( cls ) : for name , val in class_ . __dict__ . items ( ) : if isinstance ( val , Parameter ) : paramdict [ name ] = val # We only want the cache to be visible to the cls on which # params() is called, so we mangle the name ourselves at # runtime (if we were to mangle it now, it would be # _Parameterized.__params for all classes). setattr ( cls , '_%s__params' % cls . __name__ , paramdict ) pdict = paramdict if instance and self_ . self is not None : if instance == 'existing' : if self_ . self . _instance__params : return dict ( pdict , * * self_ . self . _instance__params ) return pdict else : return { k : self_ . self . param [ k ] for k in pdict } return pdict
Returns the Parameters of this instance or class
283
8
243,729
def trigger ( self_ , * param_names ) : events = self_ . self_or_cls . param . _events watchers = self_ . self_or_cls . param . _watchers self_ . self_or_cls . param . _events = [ ] self_ . self_or_cls . param . _watchers = [ ] param_values = dict ( self_ . get_param_values ( ) ) params = { name : param_values [ name ] for name in param_names } self_ . self_or_cls . param . _TRIGGER = True self_ . set_param ( * * params ) self_ . self_or_cls . param . _TRIGGER = False self_ . self_or_cls . param . _events = events self_ . self_or_cls . param . _watchers = watchers
Trigger watchers for the given set of parameter names . Watchers will be triggered whether or not the parameter values have actually changed .
197
26
243,730
def _update_event_type(self_, watcher, event, triggered):
    if triggered:
        event_type = 'triggered'
    else:
        event_type = 'changed' if watcher.onlychanged else 'set'
    return Event(what=event.what, name=event.name, obj=event.obj, cls=event.cls,
                 old=event.old, new=event.new, type=event_type)
Returns an updated Event object with the type field set appropriately .
97
12
243,731
def _call_watcher ( self_ , watcher , event ) : if self_ . self_or_cls . param . _TRIGGER : pass elif watcher . onlychanged and ( not self_ . _changed ( event ) ) : return if self_ . self_or_cls . param . _BATCH_WATCH : self_ . _events . append ( event ) if watcher not in self_ . _watchers : self_ . _watchers . append ( watcher ) elif watcher . mode == 'args' : with batch_watch ( self_ . self_or_cls , run = False ) : watcher . fn ( self_ . _update_event_type ( watcher , event , self_ . self_or_cls . param . _TRIGGER ) ) else : with batch_watch ( self_ . self_or_cls , run = False ) : event = self_ . _update_event_type ( watcher , event , self_ . self_or_cls . param . _TRIGGER ) watcher . fn ( * * { event . name : event . new } )
Invoke the given watcher appropriately, given an Event object.
251
13
243,732
def _batch_call_watchers ( self_ ) : while self_ . self_or_cls . param . _events : event_dict = OrderedDict ( [ ( ( event . name , event . what ) , event ) for event in self_ . self_or_cls . param . _events ] ) watchers = self_ . self_or_cls . param . _watchers [ : ] self_ . self_or_cls . param . _events = [ ] self_ . self_or_cls . param . _watchers = [ ] for watcher in watchers : events = [ self_ . _update_event_type ( watcher , event_dict [ ( name , watcher . what ) ] , self_ . self_or_cls . param . _TRIGGER ) for name in watcher . parameter_names if ( name , watcher . what ) in event_dict ] with batch_watch ( self_ . self_or_cls , run = False ) : if watcher . mode == 'args' : watcher . fn ( * events ) else : watcher . fn ( * * { c . name : c . new for c in events } )
Batch call a set of watchers based on the parameter value settings in kwargs using the queued Event and watcher objects .
265
28
243,733
def set_dynamic_time_fn ( self_ , time_fn , sublistattr = None ) : self_or_cls = self_ . self_or_cls self_or_cls . _Dynamic_time_fn = time_fn if isinstance ( self_or_cls , type ) : a = ( None , self_or_cls ) else : a = ( self_or_cls , ) for n , p in self_or_cls . param . objects ( 'existing' ) . items ( ) : if hasattr ( p , '_value_is_dynamic' ) : if p . _value_is_dynamic ( * a ) : g = self_or_cls . param . get_value_generator ( n ) g . _Dynamic_time_fn = time_fn if sublistattr : try : sublist = getattr ( self_or_cls , sublistattr ) except AttributeError : sublist = [ ] for obj in sublist : obj . param . set_dynamic_time_fn ( time_fn , sublistattr )
Set time_fn for all Dynamic Parameters of this class or instance object that are currently being dynamically generated .
246
21
243,734
def get_param_values ( self_ , onlychanged = False ) : self_or_cls = self_ . self_or_cls # CEB: we'd actually like to know whether a value has been # explicitly set on the instance, but I'm not sure that's easy # (would need to distinguish instantiation of default from # user setting of value). vals = [ ] for name , val in self_or_cls . param . objects ( 'existing' ) . items ( ) : value = self_or_cls . param . get_value_generator ( name ) # (this is pointless for cls) if not onlychanged or not all_equal ( value , val . default ) : vals . append ( ( name , value ) ) vals . sort ( key = itemgetter ( 0 ) ) return vals
Return a list of name value pairs for all Parameters of this object .
183
14
243,735
def force_new_dynamic_value ( self_ , name ) : # pylint: disable-msg=E0213 cls_or_slf = self_ . self_or_cls param_obj = cls_or_slf . param . objects ( 'existing' ) . get ( name ) if not param_obj : return getattr ( cls_or_slf , name ) cls , slf = None , None if isinstance ( cls_or_slf , type ) : cls = cls_or_slf else : slf = cls_or_slf if not hasattr ( param_obj , '_force' ) : return param_obj . __get__ ( slf , cls ) else : return param_obj . _force ( slf , cls )
Force a new value to be generated for the dynamic attribute name and return it .
182
16
243,736
def get_value_generator ( self_ , name ) : # pylint: disable-msg=E0213 cls_or_slf = self_ . self_or_cls param_obj = cls_or_slf . param . objects ( 'existing' ) . get ( name ) if not param_obj : value = getattr ( cls_or_slf , name ) # CompositeParameter detected by being a Parameter and having 'attribs' elif hasattr ( param_obj , 'attribs' ) : value = [ cls_or_slf . param . get_value_generator ( a ) for a in param_obj . attribs ] # not a Dynamic Parameter elif not hasattr ( param_obj , '_value_is_dynamic' ) : value = getattr ( cls_or_slf , name ) # Dynamic Parameter... else : internal_name = "_%s_param_value" % name if hasattr ( cls_or_slf , internal_name ) : # dealing with object and it's been set on this object value = getattr ( cls_or_slf , internal_name ) else : # dealing with class or isn't set on the object value = param_obj . default return value
Return the value or value - generating object of the named attribute .
285
13
243,737
def inspect_value ( self_ , name ) : # pylint: disable-msg=E0213 cls_or_slf = self_ . self_or_cls param_obj = cls_or_slf . param . objects ( 'existing' ) . get ( name ) if not param_obj : value = getattr ( cls_or_slf , name ) elif hasattr ( param_obj , 'attribs' ) : value = [ cls_or_slf . param . inspect_value ( a ) for a in param_obj . attribs ] elif not hasattr ( param_obj , '_inspect' ) : value = getattr ( cls_or_slf , name ) else : if isinstance ( cls_or_slf , type ) : value = param_obj . _inspect ( None , cls_or_slf ) else : value = param_obj . _inspect ( cls_or_slf , None ) return value
Return the current value of the named attribute without modifying it .
225
12
243,738
def outputs ( self_ ) : outputs = { } for cls in classlist ( self_ . cls ) : for name in dir ( cls ) : method = getattr ( self_ . self_or_cls , name ) dinfo = getattr ( method , '_dinfo' , { } ) if 'outputs' not in dinfo : continue for override , otype , idx in dinfo [ 'outputs' ] : if override is not None : name = override outputs [ name ] = ( otype , method , idx ) return outputs
Returns a mapping between any declared outputs and a tuple of the declared Parameter type, the output method, and the index into the output if multiple outputs are returned.
122
31
243,739
def unwatch(self_, watcher):
    try:
        self_._watch('remove', watcher)
    except:
        self_.warning('No such watcher {watcher} to remove.'.format(watcher=watcher))
Unwatch watchers set either with watch or watch_values .
55
13
243,740
def print_param_values(self_):
    self = self_.self
    for name, val in self.param.get_param_values():
        print('%s.%s = %s' % (self.name, name, val))
Print the values of all of this object's Parameters.
57
10
243,741
def warning ( self_ , msg , * args , * * kw ) : if not warnings_as_exceptions : global warning_count warning_count += 1 self_ . __db_print ( WARNING , msg , * args , * * kw ) else : raise Exception ( "Warning: " + msg % args )
Print msg merged with args as a warning unless module variable warnings_as_exceptions is True then raise an Exception containing the arguments .
69
27
243,742
def message ( self_ , msg , * args , * * kw ) : self_ . __db_print ( INFO , msg , * args , * * kw )
Print msg merged with args as a message .
37
9
243,743
def verbose ( self_ , msg , * args , * * kw ) : self_ . __db_print ( VERBOSE , msg , * args , * * kw )
Print msg merged with args as a verbose message .
40
11
243,744
def debug ( self_ , msg , * args , * * kw ) : self_ . __db_print ( DEBUG , msg , * args , * * kw )
Print msg merged with args as a debugging statement .
37
10
243,745
def __class_docstring_signature ( mcs , max_repr_len = 15 ) : processed_kws , keyword_groups = set ( ) , [ ] for cls in reversed ( mcs . mro ( ) ) : keyword_group = [ ] for ( k , v ) in sorted ( cls . __dict__ . items ( ) ) : if isinstance ( v , Parameter ) and k not in processed_kws : param_type = v . __class__ . __name__ keyword_group . append ( "%s=%s" % ( k , param_type ) ) processed_kws . add ( k ) keyword_groups . append ( keyword_group ) keywords = [ el for grp in reversed ( keyword_groups ) for el in grp ] class_docstr = "\n" + mcs . __doc__ if mcs . __doc__ else '' signature = "params(%s)" % ( ", " . join ( keywords ) ) description = param_pager ( mcs ) if ( docstring_describe_params and param_pager ) else '' mcs . __doc__ = signature + class_docstr + '\n' + description
Autogenerate a keyword signature in the class docstring for all available parameters. This is particularly useful in the IPython Notebook, as IPython will parse this signature to allow tab-completion of keywords.
260
42
243,746
def __param_inheritance ( mcs , param_name , param ) : # get all relevant slots (i.e. slots defined in all # superclasses of this parameter) slots = { } for p_class in classlist ( type ( param ) ) [ 1 : : ] : slots . update ( dict . fromkeys ( p_class . __slots__ ) ) # note for some eventual future: python 3.6+ descriptors grew # __set_name__, which could replace this and _set_names setattr ( param , 'owner' , mcs ) del slots [ 'owner' ] # backwards compatibility (see Composite parameter) if 'objtype' in slots : setattr ( param , 'objtype' , mcs ) del slots [ 'objtype' ] # instantiate is handled specially for superclass in classlist ( mcs ) [ : : - 1 ] : super_param = superclass . __dict__ . get ( param_name ) if isinstance ( super_param , Parameter ) and super_param . instantiate is True : param . instantiate = True del slots [ 'instantiate' ] for slot in slots . keys ( ) : superclasses = iter ( classlist ( mcs ) [ : : - 1 ] ) # Search up the hierarchy until param.slot (which has to # be obtained using getattr(param,slot)) is not None, or # we run out of classes to search. while getattr ( param , slot ) is None : try : param_super_class = next ( superclasses ) except StopIteration : break new_param = param_super_class . __dict__ . get ( param_name ) if new_param is not None and hasattr ( new_param , slot ) : # (slot might not be there because could be a more # general type of Parameter) new_value = getattr ( new_param , slot ) setattr ( param , slot , new_value )
Look for Parameter values in superclasses of this Parameterized class .
420
15
243,747
def _check_params ( self , params ) : overridden_object_params = list ( self . _overridden . param ) for item in params : if item not in overridden_object_params : self . param . warning ( "'%s' will be ignored (not a Parameter)." , item )
Print a warning if params contains something that is not a Parameter of the overridden object .
66
19
243,748
def _extract_extra_keywords ( self , params ) : extra_keywords = { } overridden_object_params = list ( self . _overridden . param ) for name , val in params . items ( ) : if name not in overridden_object_params : extra_keywords [ name ] = val # CEBALERT: should we remove name from params # (i.e. del params[name]) so that it's only available # via extra_keywords()? return extra_keywords
Return any items in params that are not also parameters of the overridden object .
112
16
243,749
def instance ( self_or_cls , * * params ) : if isinstance ( self_or_cls , ParameterizedMetaclass ) : cls = self_or_cls else : p = params params = dict ( self_or_cls . get_param_values ( ) ) params . update ( p ) params . pop ( 'name' ) cls = self_or_cls . __class__ inst = Parameterized . __new__ ( cls ) Parameterized . __init__ ( inst , * * params ) if 'name' in params : inst . __name__ = params [ 'name' ] else : inst . __name__ = self_or_cls . name return inst
Return an instance of this class copying parameters from any existing instance provided .
158
14
243,750
def script_repr ( self , imports = [ ] , prefix = " " ) : return self . pprint ( imports , prefix , unknown_value = '' , qualify = True , separator = "\n" )
Same as Parameterized.script_repr, except that X.classname(Y is replaced with X.classname.instance(Y.
46
29
243,751
def pprint ( self , imports = None , prefix = "\n " , unknown_value = '<?>' , qualify = False , separator = "" ) : r = Parameterized . pprint ( self , imports , prefix , unknown_value = unknown_value , qualify = qualify , separator = separator ) classname = self . __class__ . __name__ return r . replace ( ".%s(" % classname , ".%s.instance(" % classname )
Same as Parameterized.pprint, except that X.classname(Y is replaced with X.classname.instance(Y.
103
27
243,752
def resolve_filename ( self , package_dir , filename ) : sass_path = os . path . join ( package_dir , self . sass_path , filename ) if self . strip_extension : filename , _ = os . path . splitext ( filename ) css_filename = filename + '.css' css_path = os . path . join ( package_dir , self . css_path , css_filename ) return sass_path , css_path
Gets a proper full relative path of Sass source and CSS source that will be generated according to package_dir and filename .
107
25
243,753
def unresolve_filename ( self , package_dir , filename ) : filename , _ = os . path . splitext ( filename ) if self . strip_extension : for ext in ( '.scss' , '.sass' ) : test_path = os . path . join ( package_dir , self . sass_path , filename + ext , ) if os . path . exists ( test_path ) : return filename + ext else : # file not found, let it error with `.scss` extension return filename + '.scss' else : return filename
Retrieves the probable source path from the output filename. Pass in a .css path to get out a .scss path.
122
28
243,754
def _validate_importers ( importers ) : # They could have no importers, that's chill if importers is None : return None def _to_importer ( priority , func ) : assert isinstance ( priority , int ) , priority assert callable ( func ) , func return ( priority , _importer_callback_wrapper ( func ) ) # Our code assumes tuple of tuples return tuple ( _to_importer ( priority , func ) for priority , func in importers )
Validates the importers and decorates the callables with our output formatter .
106
17
243,755
def and_join(strings):
    last = len(strings) - 1
    if last == 0:
        return strings[0]
    elif last < 0:
        return ''
    iterator = enumerate(strings)
    return ', '.join('and ' + s if i == last else s for i, s in iterator)
Join the given strings with commas, inserting 'and' before the last one.
65
13
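For example, the function above produces (assuming it is in scope):

and_join([])               # -> ''
and_join(['a'])            # -> 'a'
and_join(['a', 'b'])       # -> 'a, and b'
and_join(['a', 'b', 'c'])  # -> 'a, b, and c'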
243,756
def allow_staff_or_superuser ( func ) : is_object_permission = "has_object" in func . __name__ @ wraps ( func ) def func_wrapper ( * args , * * kwargs ) : request = args [ 0 ] # use second parameter if object permission if is_object_permission : request = args [ 1 ] if request . user . is_staff or request . user . is_superuser : return True return func ( * args , * * kwargs ) return func_wrapper
This decorator is used to abstract common is_staff and is_superuser functionality out of permission checks . It determines which parameter is the request based on name .
114
33
243,757
def authenticated_users ( func ) : is_object_permission = "has_object" in func . __name__ @ wraps ( func ) def func_wrapper ( * args , * * kwargs ) : request = args [ 0 ] # use second parameter if object permission if is_object_permission : request = args [ 1 ] if not ( request . user and request . user . is_authenticated ) : return False return func ( * args , * * kwargs ) return func_wrapper
This decorator is used to abstract common authentication checking functionality out of permission checks . It determines which parameter is the request based on name .
108
27
243,758
def filter_queryset ( self , request , queryset , view ) : # Check if this is a list type request if view . lookup_field not in view . kwargs : if not self . action_routing : return self . filter_list_queryset ( request , queryset , view ) else : method_name = "filter_{action}_queryset" . format ( action = view . action ) return getattr ( self , method_name ) ( request , queryset , view ) return queryset
This method overrides the standard filter_queryset method . This method will check to see if the view calling this is from a list type action . This function will also route the filter by action type if action_routing is set to True .
117
51
243,759
def has_permission ( self , request , view ) : if not self . global_permissions : return True serializer_class = view . get_serializer_class ( ) assert serializer_class . Meta . model is not None , ( "global_permissions set to true without a model " "set on the serializer for '%s'" % view . __class__ . __name__ ) model_class = serializer_class . Meta . model action_method_name = None if hasattr ( view , 'action' ) : action = self . _get_action ( view . action ) action_method_name = "has_{action}_permission" . format ( action = action ) # If the specific action permission exists then use it, otherwise use general. if hasattr ( model_class , action_method_name ) : return getattr ( model_class , action_method_name ) ( request ) if request . method in permissions . SAFE_METHODS : assert hasattr ( model_class , 'has_read_permission' ) , self . _get_error_message ( model_class , 'has_read_permission' , action_method_name ) return model_class . has_read_permission ( request ) else : assert hasattr ( model_class , 'has_write_permission' ) , self . _get_error_message ( model_class , 'has_write_permission' , action_method_name ) return model_class . has_write_permission ( request )
Overrides the standard function and figures out methods to call for global permissions .
334
16
243,760
def has_object_permission ( self , request , view , obj ) : if not self . object_permissions : return True serializer_class = view . get_serializer_class ( ) model_class = serializer_class . Meta . model action_method_name = None if hasattr ( view , 'action' ) : action = self . _get_action ( view . action ) action_method_name = "has_object_{action}_permission" . format ( action = action ) # If the specific action permission exists then use it, otherwise use general. if hasattr ( obj , action_method_name ) : return getattr ( obj , action_method_name ) ( request ) if request . method in permissions . SAFE_METHODS : assert hasattr ( obj , 'has_object_read_permission' ) , self . _get_error_message ( model_class , 'has_object_read_permission' , action_method_name ) return obj . has_object_read_permission ( request ) else : assert hasattr ( obj , 'has_object_write_permission' ) , self . _get_error_message ( model_class , 'has_object_write_permission' , action_method_name ) return obj . has_object_write_permission ( request )
Overrides the standard function and figures out methods to call for object permissions .
292
16
243,761
def _get_action(self, action):
    return_action = action
    if self.partial_update_is_update and action == 'partial_update':
        return_action = 'update'
    return return_action
Utility function that consolidates actions if necessary .
46
10
243,762
def _get_error_message(self, model_class, method_name, action_method_name):
    if action_method_name:
        return "'{}' does not have '{}' or '{}' defined.".format(
            model_class, method_name, action_method_name)
    else:
        return "'{}' does not have '{}' defined.".format(model_class, method_name)
Get the assertion error message, depending on whether action-specific permission methods are defined.
95
13
243,763
def bind ( self , field_name , parent ) : assert parent . Meta . model is not None , "DRYPermissions is used on '{}' without a model" . format ( parent . __class__ . __name__ ) for action in self . actions : if not self . object_only : global_method_name = "has_{action}_permission" . format ( action = action ) if hasattr ( parent . Meta . model , global_method_name ) : self . action_method_map [ action ] = { 'global' : global_method_name } if not self . global_only : object_method_name = "has_object_{action}_permission" . format ( action = action ) if hasattr ( parent . Meta . model , object_method_name ) : if self . action_method_map . get ( action , None ) is None : self . action_method_map [ action ] = { } self . action_method_map [ action ] [ 'object' ] = object_method_name super ( DRYPermissionsField , self ) . bind ( field_name , parent )
Check the model attached to the serializer to see what methods are defined and save them .
247
18
243,764
def skip_prepare(func):
    @wraps(func)
    def _wrapper(self, *args, **kwargs):
        value = func(self, *args, **kwargs)
        return Data(value, should_prepare=False)
    return _wrapper
A convenience decorator for indicating the raw data should not be prepared .
60
14
243,765
def build_error ( self , err ) : data = { 'error' : err . args [ 0 ] , } if self . is_debug ( ) : # Add the traceback. data [ 'traceback' ] = format_traceback ( sys . exc_info ( ) ) body = self . serializer . serialize ( data ) status = getattr ( err , 'status' , 500 ) return self . build_response ( body , status = status )
When an exception is encountered this generates a JSON error message for display to the user .
99
17
243,766
def deserialize(self, method, endpoint, body):
    if endpoint == 'list':
        return self.deserialize_list(body)
    return self.deserialize_detail(body)
A convenience method for deserializing the body of a request .
43
13
243,767
def serialize(self, method, endpoint, data):
    if endpoint == 'list':
        # Create is a special-case, because you POST it to the collection,
        # not to a detail.
        if method == 'POST':
            return self.serialize_detail(data)
        return self.serialize_list(data)
    return self.serialize_detail(data)
A convenience method for serializing data for a response .
79
11
243,768
def _method(self, *args, **kwargs):
    yield self.resource_handler.handle(self.__resource_view_type__, *args, **kwargs)
The body of the HTTP methods used in tornado.web.RequestHandler.
44
15
243,769
def as_view ( cls , view_type , * init_args , * * init_kwargs ) : global _method new_cls = type ( cls . __name__ + '_' + _BridgeMixin . __name__ + '_restless' , ( _BridgeMixin , cls . _request_handler_base_ , ) , dict ( __resource_cls__ = cls , __resource_args__ = init_args , __resource_kwargs__ = init_kwargs , __resource_view_type__ = view_type ) ) """ Add required http-methods to the newly created class We need to scan through MRO to find what functions users declared, and then add corresponding http-methods used by Tornado. """ bases = inspect . getmro ( cls ) bases = bases [ 0 : bases . index ( Resource ) - 1 ] for k , v in cls . http_methods [ view_type ] . items ( ) : if any ( v in base_cls . __dict__ for base_cls in bases ) : setattr ( new_cls , k . lower ( ) , _method ) return new_cls
Return a subclass of tornado.web.RequestHandler and apply the required settings.
258
15
243,770
def handle ( self , endpoint , * args , * * kwargs ) : method = self . request_method ( ) try : if not method in self . http_methods . get ( endpoint , { } ) : raise MethodNotImplemented ( "Unsupported method '{}' for {} endpoint." . format ( method , endpoint ) ) if not self . is_authenticated ( ) : raise Unauthorized ( ) self . data = self . deserialize ( method , endpoint , self . request_body ( ) ) view_method = getattr ( self , self . http_methods [ endpoint ] [ method ] ) data = view_method ( * args , * * kwargs ) if is_future ( data ) : # need to check if the view_method is a generator or not data = yield data serialized = self . serialize ( method , endpoint , data ) except Exception as err : raise gen . Return ( self . handle_error ( err ) ) status = self . status_map . get ( self . http_methods [ endpoint ] [ method ] , OK ) raise gen . Return ( self . build_response ( serialized , status = status ) )
Almost identical to Resource.handle, except for the way we handle the return value of view_method.
252
19
243,771
def prepare ( self , data ) : result = { } if not self . fields : # No fields specified. Serialize everything. return data for fieldname , lookup in self . fields . items ( ) : if isinstance ( lookup , SubPreparer ) : result [ fieldname ] = lookup . prepare ( data ) else : result [ fieldname ] = self . lookup_data ( lookup , data ) return result
Handles transforming the provided data into the fielded data that should be exposed to the end user .
86
19
243,772
def lookup_data ( self , lookup , data ) : value = data parts = lookup . split ( '.' ) if not parts or not parts [ 0 ] : return value part = parts [ 0 ] remaining_lookup = '.' . join ( parts [ 1 : ] ) if callable ( getattr ( data , 'keys' , None ) ) and hasattr ( data , '__getitem__' ) : # Dictionary enough for us. value = data [ part ] elif data is not None : # Assume it's an object. value = getattr ( data , part ) # Call if it's callable except if it's a Django DB manager instance # We check if is a manager by checking the db_manager (duck typing) if callable ( value ) and not hasattr ( value , 'db_manager' ) : value = value ( ) if not remaining_lookup : return value # There's more to lookup, so dive in recursively. return self . lookup_data ( remaining_lookup , value )
Given a lookup string attempts to descend through nested data looking for the value .
220
15
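A brief illustration of the dotted-lookup behaviour above (hypothetical data; assumes `preparer` is an instance of the preparer class this method belongs to, e.g. restless's FieldsPreparer):

data = {'author': {'name': 'Daniel', 'pets': lambda: ['dog']}}

# Dotted lookups descend one part at a time, indexing dicts,
# using getattr on objects, and calling callables along the way.
preparer.lookup_data('author.name', data)  # -> 'Daniel'
preparer.lookup_data('author.pets', data)  # -> ['dog'] (the callable is called)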
243,773
def prepare(self, data):
    result = []
    for item in self.get_inner_data(data):
        result.append(self.preparer.prepare(item))
    return result
Handles passing each item in the collection data to the configured subpreparer .
42
16
243,774
def build_url_name(cls, name, name_prefix=None):
    if name_prefix is None:
        name_prefix = 'api_{}'.format(cls.__name__.replace('Resource', '').lower())
    name_prefix = name_prefix.rstrip('_')
    return '_'.join([name_prefix, name])
Given a name & an optional name_prefix this generates a name for a URL .
88
17
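For example, with a hypothetical resource subclass (a sketch, assuming build_url_name is a classmethod on a restless-style Resource as shown above):

# The default prefix is derived from the class name with the
# 'Resource' suffix stripped and lowercased.
class PostResource(Resource):
    pass

PostResource.build_url_name('list')                       # -> 'api_post_list'
PostResource.build_url_name('detail', name_prefix='v2_')  # -> 'v2_detail'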
243,775
def build_endpoint_name ( cls , name , endpoint_prefix = None ) : if endpoint_prefix is None : endpoint_prefix = 'api_{}' . format ( cls . __name__ . replace ( 'Resource' , '' ) . lower ( ) ) endpoint_prefix = endpoint_prefix . rstrip ( '_' ) return '_' . join ( [ endpoint_prefix , name ] )
Given a name & an optional endpoint_prefix this generates a name for a URL .
89
17
243,776
def build_routename ( cls , name , routename_prefix = None ) : if routename_prefix is None : routename_prefix = 'api_{}' . format ( cls . __name__ . replace ( 'Resource' , '' ) . lower ( ) ) routename_prefix = routename_prefix . rstrip ( '_' ) return '_' . join ( [ routename_prefix , name ] )
Given a name & an optional routename_prefix this generates a name for a URL .
94
18
243,777
def add_views ( cls , config , rule_prefix , routename_prefix = None ) : methods = ( 'GET' , 'POST' , 'PUT' , 'DELETE' ) config . add_route ( cls . build_routename ( 'list' , routename_prefix ) , rule_prefix ) config . add_view ( cls . as_list ( ) , route_name = cls . build_routename ( 'list' , routename_prefix ) , request_method = methods ) config . add_route ( cls . build_routename ( 'detail' , routename_prefix ) , rule_prefix + '{name}/' ) config . add_view ( cls . as_detail ( ) , route_name = cls . build_routename ( 'detail' , routename_prefix ) , request_method = methods ) return config
A convenience method for registering the routes and views in pyramid .
198
12
243,778
def deserialize(self, body):
    try:
        if isinstance(body, bytes):
            return json.loads(body.decode('utf-8'))
        return json.loads(body)
    except ValueError:
        raise BadRequest('Request body is not valid JSON')
The low-level deserialization.
61
8
243,779
def convert_mnist ( directory , output_directory , output_filename = None , dtype = None ) : if not output_filename : if dtype : output_filename = 'mnist_{}.hdf5' . format ( dtype ) else : output_filename = 'mnist.hdf5' output_path = os . path . join ( output_directory , output_filename ) h5file = h5py . File ( output_path , mode = 'w' ) train_feat_path = os . path . join ( directory , TRAIN_IMAGES ) train_features = read_mnist_images ( train_feat_path , dtype ) train_lab_path = os . path . join ( directory , TRAIN_LABELS ) train_labels = read_mnist_labels ( train_lab_path ) test_feat_path = os . path . join ( directory , TEST_IMAGES ) test_features = read_mnist_images ( test_feat_path , dtype ) test_lab_path = os . path . join ( directory , TEST_LABELS ) test_labels = read_mnist_labels ( test_lab_path ) data = ( ( 'train' , 'features' , train_features ) , ( 'train' , 'targets' , train_labels ) , ( 'test' , 'features' , test_features ) , ( 'test' , 'targets' , test_labels ) ) fill_hdf5_file ( h5file , data ) h5file [ 'features' ] . dims [ 0 ] . label = 'batch' h5file [ 'features' ] . dims [ 1 ] . label = 'channel' h5file [ 'features' ] . dims [ 2 ] . label = 'height' h5file [ 'features' ] . dims [ 3 ] . label = 'width' h5file [ 'targets' ] . dims [ 0 ] . label = 'batch' h5file [ 'targets' ] . dims [ 1 ] . label = 'index' h5file . flush ( ) h5file . close ( ) return ( output_path , )
Converts the MNIST dataset to HDF5 .
485
11
243,780
def fill_subparser ( subparser ) : subparser . add_argument ( "--dtype" , help = "dtype to save to; by default, images will be " + "returned in their original unsigned byte format" , choices = ( 'float32' , 'float64' , 'bool' ) , type = str , default = None ) return convert_mnist
Sets up a subparser to convert the MNIST dataset files .
82
14
243,781
def read_mnist_images(filename, dtype=None):
    with gzip.open(filename, 'rb') as f:
        magic, number, rows, cols = struct.unpack('>iiii', f.read(16))
        if magic != MNIST_IMAGE_MAGIC:
            raise ValueError("Wrong magic number reading MNIST image file")
        array = numpy.frombuffer(f.read(), dtype='uint8')
    array = array.reshape((number, 1, rows, cols))
    if dtype:
        dtype = numpy.dtype(dtype)
        if dtype.kind == 'b':
            # If the user wants Booleans, threshold at half the range.
            array = array >= 128
        elif dtype.kind == 'f':
            # Otherwise, just convert.
            array = array.astype(dtype)
            array /= 255.
        else:
            raise ValueError("Unknown dtype to convert MNIST to")
    return array
Read MNIST images from the original ubyte file format .
222
12
243,782
def read_mnist_labels(filename):
    with gzip.open(filename, 'rb') as f:
        magic, _ = struct.unpack('>ii', f.read(8))
        if magic != MNIST_LABEL_MAGIC:
            raise ValueError("Wrong magic number reading MNIST label file")
        array = numpy.frombuffer(f.read(), dtype='uint8')
    array = array.reshape(array.size, 1)
    return array
Read MNIST labels from the original ubyte file format .
113
12
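A short usage sketch for the two readers above (hypothetical file paths; the original gzipped IDX files must already be downloaded):

images = read_mnist_images('train-images-idx3-ubyte.gz', dtype='float32')
labels = read_mnist_labels('train-labels-idx1-ubyte.gz')
print(images.shape)  # (60000, 1, 28, 28) for the standard training file,
                     # scaled to [0, 1] when a float dtype is requested
print(labels.shape)  # (60000, 1)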
243,783
def prepare_hdf5_file ( hdf5_file , n_train , n_valid , n_test ) : n_total = n_train + n_valid + n_test splits = create_splits ( n_train , n_valid , n_test ) hdf5_file . attrs [ 'split' ] = H5PYDataset . create_split_array ( splits ) vlen_dtype = h5py . special_dtype ( vlen = numpy . dtype ( 'uint8' ) ) hdf5_file . create_dataset ( 'encoded_images' , shape = ( n_total , ) , dtype = vlen_dtype ) hdf5_file . create_dataset ( 'targets' , shape = ( n_total , 1 ) , dtype = numpy . int16 ) hdf5_file . create_dataset ( 'filenames' , shape = ( n_total , 1 ) , dtype = 'S32' )
Create datasets within a given HDF5 file .
232
10
243,784
def process_train_set ( hdf5_file , train_archive , patch_archive , n_train , wnid_map , shuffle_seed = None ) : producer = partial ( train_set_producer , train_archive = train_archive , patch_archive = patch_archive , wnid_map = wnid_map ) consumer = partial ( image_consumer , hdf5_file = hdf5_file , num_expected = n_train , shuffle_seed = shuffle_seed ) producer_consumer ( producer , consumer )
Process the ILSVRC2010 training set .
121
10
243,785
def image_consumer ( socket , hdf5_file , num_expected , shuffle_seed = None , offset = 0 ) : with progress_bar ( 'images' , maxval = num_expected ) as pb : if shuffle_seed is None : index_gen = iter ( xrange ( num_expected ) ) else : rng = numpy . random . RandomState ( shuffle_seed ) index_gen = iter ( rng . permutation ( num_expected ) ) for i , num in enumerate ( index_gen ) : image_filename , class_index = socket . recv_pyobj ( zmq . SNDMORE ) image_data = numpy . fromstring ( socket . recv ( ) , dtype = 'uint8' ) _write_to_hdf5 ( hdf5_file , num + offset , image_filename , image_data , class_index ) pb . update ( i + 1 )
Fill an HDF5 file with incoming images from a socket .
205
13
243,786
def process_other_set ( hdf5_file , which_set , image_archive , patch_archive , groundtruth , offset ) : producer = partial ( other_set_producer , image_archive = image_archive , patch_archive = patch_archive , groundtruth = groundtruth , which_set = which_set ) consumer = partial ( image_consumer , hdf5_file = hdf5_file , num_expected = len ( groundtruth ) , offset = offset ) producer_consumer ( producer , consumer )
Process the validation or test set .
114
7
243,787
def load_from_tar_or_patch ( tar , image_filename , patch_images ) : patched = True image_bytes = patch_images . get ( os . path . basename ( image_filename ) , None ) if image_bytes is None : patched = False try : image_bytes = tar . extractfile ( image_filename ) . read ( ) numpy . array ( Image . open ( io . BytesIO ( image_bytes ) ) ) except ( IOError , OSError ) : with gzip . GzipFile ( fileobj = tar . extractfile ( image_filename ) ) as gz : image_bytes = gz . read ( ) numpy . array ( Image . open ( io . BytesIO ( image_bytes ) ) ) return image_bytes , patched
Do everything necessary to process an image inside a TAR .
172
12
243,788
def read_devkit ( f ) : with tar_open ( f ) as tar : # Metadata table containing class hierarchy, textual descriptions, etc. meta_mat = tar . extractfile ( DEVKIT_META_PATH ) synsets , cost_matrix = read_metadata_mat_file ( meta_mat ) # Raw validation data groundtruth, ILSVRC2010 IDs. Confusingly # distributed inside the development kit archive. raw_valid_groundtruth = numpy . loadtxt ( tar . extractfile ( DEVKIT_VALID_GROUNDTRUTH_PATH ) , dtype = numpy . int16 ) return synsets , cost_matrix , raw_valid_groundtruth
Read relevant information from the development kit archive .
153
9
243,789
def extract_patch_images ( f , which_set ) : if which_set not in ( 'train' , 'valid' , 'test' ) : raise ValueError ( 'which_set must be one of train, valid, or test' ) which_set = 'val' if which_set == 'valid' else which_set patch_images = { } with tar_open ( f ) as tar : for info_obj in tar : if not info_obj . name . endswith ( '.JPEG' ) : continue # Pretty sure that '/' is used for tarfile regardless of # os.path.sep, but I officially don't care about Windows. tokens = info_obj . name . split ( '/' ) file_which_set = tokens [ - 2 ] if file_which_set != which_set : continue filename = tokens [ - 1 ] patch_images [ filename ] = tar . extractfile ( info_obj . name ) . read ( ) return patch_images
Extracts a dict of the patch images for ILSVRC2010 .
214
16
243,790
def convert_cifar10 ( directory , output_directory , output_filename = 'cifar10.hdf5' ) : output_path = os . path . join ( output_directory , output_filename ) h5file = h5py . File ( output_path , mode = 'w' ) input_file = os . path . join ( directory , DISTRIBUTION_FILE ) tar_file = tarfile . open ( input_file , 'r:gz' ) train_batches = [ ] for batch in range ( 1 , 6 ) : file = tar_file . extractfile ( 'cifar-10-batches-py/data_batch_%d' % batch ) try : if six . PY3 : array = cPickle . load ( file , encoding = 'latin1' ) else : array = cPickle . load ( file ) train_batches . append ( array ) finally : file . close ( ) train_features = numpy . concatenate ( [ batch [ 'data' ] . reshape ( batch [ 'data' ] . shape [ 0 ] , 3 , 32 , 32 ) for batch in train_batches ] ) train_labels = numpy . concatenate ( [ numpy . array ( batch [ 'labels' ] , dtype = numpy . uint8 ) for batch in train_batches ] ) train_labels = numpy . expand_dims ( train_labels , 1 ) file = tar_file . extractfile ( 'cifar-10-batches-py/test_batch' ) try : if six . PY3 : test = cPickle . load ( file , encoding = 'latin1' ) else : test = cPickle . load ( file ) finally : file . close ( ) test_features = test [ 'data' ] . reshape ( test [ 'data' ] . shape [ 0 ] , 3 , 32 , 32 ) test_labels = numpy . array ( test [ 'labels' ] , dtype = numpy . uint8 ) test_labels = numpy . expand_dims ( test_labels , 1 ) data = ( ( 'train' , 'features' , train_features ) , ( 'train' , 'targets' , train_labels ) , ( 'test' , 'features' , test_features ) , ( 'test' , 'targets' , test_labels ) ) fill_hdf5_file ( h5file , data ) h5file [ 'features' ] . dims [ 0 ] . label = 'batch' h5file [ 'features' ] . dims [ 1 ] . label = 'channel' h5file [ 'features' ] . dims [ 2 ] . label = 'height' h5file [ 'features' ] . dims [ 3 ] . label = 'width' h5file [ 'targets' ] . dims [ 0 ] . label = 'batch' h5file [ 'targets' ] . dims [ 1 ] . label = 'index' h5file . flush ( ) h5file . close ( ) return ( output_path , )
Converts the CIFAR - 10 dataset to HDF5 .
696
14
243,791
def check_exists(required_files):
    def function_wrapper(f):
        @wraps(f)
        def wrapped(directory, *args, **kwargs):
            missing = []
            for filename in required_files:
                if not os.path.isfile(os.path.join(directory, filename)):
                    missing.append(filename)
            if len(missing) > 0:
                raise MissingInputFiles('Required files missing', missing)
            return f(directory, *args, **kwargs)
        return wrapped
    return function_wrapper
Decorator that checks if required files exist before running .
118
12
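A minimal sketch of how the decorator above is applied to a converter (hypothetical converter and file names):

@check_exists(required_files=['train.csv', 'test.csv'])
def convert_toy_dataset(directory, output_directory, output_filename='toy.hdf5'):
    # Only reached if both files exist inside `directory`;
    # otherwise the wrapper raises MissingInputFiles.
    ...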
243,792
def fill_hdf5_file ( h5file , data ) : # Check that all sources for a split have the same length split_names = set ( split_tuple [ 0 ] for split_tuple in data ) for name in split_names : lengths = [ len ( split_tuple [ 2 ] ) for split_tuple in data if split_tuple [ 0 ] == name ] if not all ( le == lengths [ 0 ] for le in lengths ) : raise ValueError ( "split '{}' has sources that " . format ( name ) + "vary in length" ) # Initialize split dictionary split_dict = dict ( [ ( split_name , { } ) for split_name in split_names ] ) # Compute total source lengths and check that splits have the same dtype # across a source source_names = set ( split_tuple [ 1 ] for split_tuple in data ) for name in source_names : splits = [ s for s in data if s [ 1 ] == name ] indices = numpy . cumsum ( [ 0 ] + [ len ( s [ 2 ] ) for s in splits ] ) if not all ( s [ 2 ] . dtype == splits [ 0 ] [ 2 ] . dtype for s in splits ) : raise ValueError ( "source '{}' has splits that " . format ( name ) + "vary in dtype" ) if not all ( s [ 2 ] . shape [ 1 : ] == splits [ 0 ] [ 2 ] . shape [ 1 : ] for s in splits ) : raise ValueError ( "source '{}' has splits that " . format ( name ) + "vary in shapes" ) dataset = h5file . create_dataset ( name , ( sum ( len ( s [ 2 ] ) for s in splits ) , ) + splits [ 0 ] [ 2 ] . shape [ 1 : ] , dtype = splits [ 0 ] [ 2 ] . dtype ) dataset [ ... ] = numpy . concatenate ( [ s [ 2 ] for s in splits ] , axis = 0 ) for i , j , s in zip ( indices [ : - 1 ] , indices [ 1 : ] , splits ) : if len ( s ) == 4 : split_dict [ s [ 0 ] ] [ name ] = ( i , j , None , s [ 3 ] ) else : split_dict [ s [ 0 ] ] [ name ] = ( i , j ) h5file . attrs [ 'split' ] = H5PYDataset . create_split_array ( split_dict )
Fills an HDF5 file in a H5PYDataset - compatible manner .
562
20
243,793
def progress_bar ( name , maxval , prefix = 'Converting' ) : widgets = [ '{} {}: ' . format ( prefix , name ) , Percentage ( ) , ' ' , Bar ( marker = '=' , left = '[' , right = ']' ) , ' ' , ETA ( ) ] bar = ProgressBar ( widgets = widgets , max_value = maxval , fd = sys . stdout ) . start ( ) try : yield bar finally : bar . update ( maxval ) bar . finish ( )
Manages a progress bar for a conversion .
114
9
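Typical usage of the context manager above (a sketch; assumes the progressbar widgets it references are available):

with progress_bar('images', maxval=1000) as bar:
    for i in range(1000):
        # ... process one item ...
        bar.update(i + 1)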
243,794
def convert_iris ( directory , output_directory , output_filename = 'iris.hdf5' ) : classes = { b'Iris-setosa' : 0 , b'Iris-versicolor' : 1 , b'Iris-virginica' : 2 } data = numpy . loadtxt ( os . path . join ( directory , 'iris.data' ) , converters = { 4 : lambda x : classes [ x ] } , delimiter = ',' ) features = data [ : , : - 1 ] . astype ( 'float32' ) targets = data [ : , - 1 ] . astype ( 'uint8' ) . reshape ( ( - 1 , 1 ) ) data = ( ( 'all' , 'features' , features ) , ( 'all' , 'targets' , targets ) ) output_path = os . path . join ( output_directory , output_filename ) h5file = h5py . File ( output_path , mode = 'w' ) fill_hdf5_file ( h5file , data ) h5file [ 'features' ] . dims [ 0 ] . label = 'batch' h5file [ 'features' ] . dims [ 1 ] . label = 'feature' h5file [ 'targets' ] . dims [ 0 ] . label = 'batch' h5file [ 'targets' ] . dims [ 1 ] . label = 'index' h5file . flush ( ) h5file . close ( ) return ( output_path , )
Convert the Iris dataset to HDF5 .
340
10
243,795
def fill_subparser ( subparser ) : urls = ( [ None ] * len ( ALL_FILES ) ) filenames = list ( ALL_FILES ) subparser . set_defaults ( urls = urls , filenames = filenames ) subparser . add_argument ( '-P' , '--url-prefix' , type = str , default = None , help = "URL prefix to prepend to the filenames of " "non-public files, in order to download them. " "Be sure to include the trailing slash." ) return default_downloader
Sets up a subparser to download the ILSVRC2012 dataset files .
129
17
243,796
def _get_target_index(self):
    return (self.index + self.source_window * (not self.overlapping) + self.offset)
Return the index where the target window starts .
34
9
243,797
def _get_end_index(self):
    return max(self.index + self.source_window,
               self._get_target_index() + self.target_window)
Return the end of both windows .
41
7
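A worked example of the two index helpers above (hypothetical window settings): with index=0, source_window=5, target_window=5, offset=0 and overlapping=False, the target window starts at 0 + 5*1 + 0 = 5 and the end index is max(0 + 5, 5 + 5) = 10; with overlapping=True the target window instead starts at 0 and the end index is max(5, 0 + 5) = 5.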
243,798
def convert_svhn ( which_format , directory , output_directory , output_filename = None ) : if which_format not in ( 1 , 2 ) : raise ValueError ( "SVHN format needs to be either 1 or 2." ) if not output_filename : output_filename = 'svhn_format_{}.hdf5' . format ( which_format ) if which_format == 1 : return convert_svhn_format_1 ( directory , output_directory , output_filename ) else : return convert_svhn_format_2 ( directory , output_directory , output_filename )
Converts the SVHN dataset to HDF5 .
131
12
243,799
def open_(filename, mode='r', encoding=None):
    if filename.endswith('.gz'):
        if six.PY2:
            zf = io.BufferedReader(gzip.open(filename, mode))
            if encoding:
                return codecs.getreader(encoding)(zf)
            else:
                return zf
        else:
            return io.BufferedReader(gzip.open(filename, mode, encoding=encoding))
    if six.PY2:
        if encoding:
            return codecs.open(filename, mode, encoding=encoding)
        else:
            return open(filename, mode)
    else:
        return open(filename, mode, encoding=encoding)
Open a text file with encoding and optional gzip compression .
145
12