idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
43,800
def update_effects(self, node):
    """Combine the first call argument with every remaining argument.

    Returns the list of combination results, one per extra argument.
    """
    # NOTE(review): semantics of ``combine`` (register / aliasing_type
    # flags) are defined elsewhere in the project -- confirm there.
    return [self.combine(node.args[0], node_args_k,
                         register=True, aliasing_type=True)
            for node_args_k in node.args[1:]]
Combiner when we update the first argument of a function .
43,801
def save_method(elements, module_path):
    """Recursively record methods with their module path and signature.

    Walks ``elements`` (a nested mapping of submodules, classes and
    signatures), accumulating into the ``methods`` table.
    """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # a submodule: recurse
            save_method(signature, module_path + (elem,))
        elif isinstance(signature, Class):  # a class: recurse into fields
            save_method(signature.fields, module_path + (elem,))
        elif signature.ismethod():
            # When several modules provide the same method name, the
            # __dispatch__ pseudo-module entry is the canonical one.
            if elem in methods and module_path[0] != '__dispatch__':
                assert elem in MODULES['__dispatch__']
                path = ('__dispatch__',)
                methods[elem] = (path, MODULES['__dispatch__'][elem])
            else:
                methods[elem] = (module_path, signature)
Recursively save methods with module name and signature .
43,802
def save_function(elements, module_path):
    """Recursively record static functions with module path and signature.

    Unlike attributes, a function name may exist in several modules, so
    the ``functions`` table maps a name to a list of candidates.
    """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # a submodule: recurse
            save_function(signature, module_path + (elem,))
        elif signature.isstaticfunction():
            functions.setdefault(elem, []).append((module_path, signature,))
        elif isinstance(signature, Class):  # a class: recurse into fields
            save_function(signature.fields, module_path + (elem,))
Recursively save functions with module name and signature .
43,803
def save_attribute(elements, module_path):
    """Recursively record attributes with module path and signature.

    Attribute names must be globally unique, hence the assertion.
    """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # a submodule: recurse
            save_attribute(signature, module_path + (elem,))
        elif signature.isattribute():
            assert elem not in attributes  # no duplicates allowed
            attributes[elem] = (module_path, signature,)
        elif isinstance(signature, Class):  # a class: recurse into fields
            save_attribute(signature.fields, module_path + (elem,))
Recursively save attributes with module name and signature .
43,804
def visit_Assign(self, node):
    """Replace ``list`` calls by ``static_list`` calls when possible."""
    self.generic_visit(node)
    # Only convert values previously proven to have a fixed size.
    if node.value not in self.fixed_size_list:
        return node
    node.value = self.convert(node.value)
    return node
Replace list calls by static_list calls when possible
43,805
def copy_pkg(self, pkg, src_only=False):
    "Install boost deps from the third_party directory"
    # ``no_<pkg>`` is a setup option; None means the dependency was not
    # disabled, so it must be vendored.
    if getattr(self, 'no_' + pkg) is None:
        print('Copying boost dependencies')
        to_copy = pkg,
    else:
        return
    src = os.path.join('third_party', *to_copy)
    # Copy into the build tree unless only the source tree is wanted.
    if not src_only:
        target = os.path.join(self.build_lib, 'pythran', *to_copy)
        shutil.rmtree(target, True)  # ignore_errors: target may not exist
        shutil.copytree(src, target)
    # Always refresh the in-source copy.
    target = os.path.join('pythran', *to_copy)
    shutil.rmtree(target, True)
    shutil.copytree(src, target)
Install boost deps from the third_party directory
43,806
def check_list(self, node_list, pattern_list):
    """Return True when both sequences have the same length and every
    node matches the pattern at the same position."""
    if len(node_list) != len(pattern_list):
        return False
    paired = zip(node_list, pattern_list)
    return all(Check(node_elt, self.placeholders).visit(pattern_elt)
               for node_elt, pattern_elt in paired)
Check whether two lists of nodes are equal .
43,807
def visit_Placeholder(self, pattern):
    """Save the matching node, or compare it with the one saved earlier.

    A placeholder id must always bind to equivalent nodes: the first
    occurrence stores the node, later occurrences are checked against
    the stored one.
    """
    if (pattern.id in self.placeholders and
            not Check(self.node, self.placeholders).visit(
                self.placeholders[pattern.id])):
        return False
    else:
        self.placeholders[pattern.id] = self.node
        return True
Save matching node or compare it with the existing one .
43,808
def visit_AST_or(self, pattern):
    """Match when at least one alternative of the AST_or pattern matches."""
    for alternative in pattern.args:
        if self.field_match(self.node, alternative):
            return True
    return False
Match if any of the or content match with the other node .
43,809
def visit_Set(self, pattern):
    """Match a Set pattern: set values are unordered.

    Every permutation of the pattern elements is tried (factorial
    cost), so overly long patterns are rejected up front.
    """
    if len(pattern.elts) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Set is too long")
    return (isinstance(self.node, Set) and
            any(self.check_list(self.node.elts, pattern_elts)
                for pattern_elts in permutations(pattern.elts)))
Sets have unordered values .
43,810
def visit_Dict(self, pattern):
    """Match a Dict pattern: dict entries may appear in any order.

    For each permutation of the node keys, keys are matched pairwise;
    on a full key match the values are compared in the same order.
    """
    if not isinstance(self.node, Dict):
        return False
    if len(pattern.keys) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Dict is too long")
    for permutation in permutations(range(len(self.node.keys))):
        for i, value in enumerate(permutation):
            if not self.field_match(self.node.keys[i],
                                    pattern.keys[value]):
                break
        else:
            # All keys matched under this permutation: compare values
            # reordered the same way.
            pattern_values = [pattern.values[i] for i in permutation]
            return self.check_list(self.node.values, pattern_values)
    return False
Dict can match with unordered values .
43,811
def field_match(self, node_field, pattern_field):
    """Check whether a node field matches a pattern field.

    A field matches when it is a matching list, a matching AST node,
    or a plainly equal value (with NaN treated as equal to NaN).
    """
    is_good_list = (isinstance(pattern_field, list) and
                    self.check_list(node_field, pattern_field))
    is_good_node = (isinstance(pattern_field, AST) and
                    Check(node_field, self.placeholders).visit(pattern_field))

    def strict_eq(f0, f1):
        # NaN != NaN under IEEE rules, but patterns still want NaN to
        # match NaN.
        try:
            return f0 == f1 or (isnan(f0) and isnan(f1))
        except TypeError:
            # isnan rejects non-numbers: fall back to plain equality.
            return f0 == f1
    is_same = strict_eq(pattern_field, node_field)
    return is_good_list or is_good_node or is_same
Check if two fields match .
43,812
def generic_visit(self, pattern):
    """Check that the pattern matches the checked node.

    The node types must agree and every field of the node must match
    the corresponding pattern field.
    """
    return (isinstance(pattern, type(self.node)) and
            all(self.field_match(value, getattr(pattern, field))
                for field, value in iter_fields(self.node)))
Check if the pattern match with the checked node .
43,813
def visit(self, node):
    """Collect every node matching the stored pattern into ``result``."""
    # Fresh placeholder dict per candidate: bindings must not leak
    # between match attempts.
    if Check(node, dict()).visit(self.pattern):
        self.result.add(node)
    self.generic_visit(node)
Visitor looking for matching between current node and pattern .
43,814
def visit_Call(self, node):
    """Compute use of variables in a function call.

    Arguments are visited first, laziness of the (func, args) pair is
    recorded next, and the callee expression is visited last.
    """
    md.visit(self, node)
    for arg in node.args:
        self.visit(arg)
    self.func_args_lazyness(node.func, node.args, node)
    self.visit(node.func)
Compute use of variables in a function call .
43,815
def n_queens(queen_count):
    """N-Queens solver.

    Return every placement of ``queen_count`` queens, one per column,
    such that no two queens share a row or a diagonal.  Each solution
    is a tuple ``vec`` where ``vec[i]`` is the row of column ``i``'s
    queen.
    """
    cols = range(queen_count)
    solutions = []
    for vec in permutations(cols, None):
        # A permutation already guarantees distinct rows; it is a valid
        # placement iff both diagonal projections are also all distinct,
        # i.e. their sets keep the full length.
        diag_up = set(vec[i] + i for i in cols)
        diag_down = set(vec[i] - i for i in cols)
        if queen_count == len(diag_up) == len(diag_down):
            solutions.append(vec)
    return solutions
N - Queens solver .
43,816
def visit_Stmt(self, node):
    """Add new variable definitions before the statement.

    Definitions accumulated while visiting the statement's children
    are flushed in front of the statement itself.
    """
    # Set the outer definition list aside and start a fresh one for
    # this statement's children.
    save_defs, self.defs = self.defs or list(), list()
    self.generic_visit(node)
    new_defs, self.defs = self.defs, save_defs
    return new_defs + [node]
Add new variable definition before the Statement .
43,817
def visit_Call(self, node):
    """Replace a function call by the inlined function's body.

    Only applies when the callee aliases to exactly one inlinable
    FunctionDef.  Each argument is bound to a fresh temporary assigned
    just before the call site (collected through ``self.defs``).
    """
    func_aliases = self.aliases[node.func]
    if len(func_aliases) == 1:
        function_def = next(iter(func_aliases))
        if (isinstance(function_def, ast.FunctionDef) and
                function_def.name in self.inlinable):
            self.update = True
            # Work on a copy: the template body is reused per call site.
            to_inline = copy.deepcopy(self.inlinable[function_def.name])
            arg_to_value = dict()
            values = node.args
            # Complete missing positional arguments with the defaults.
            values += to_inline.args.defaults[
                len(node.args) - len(to_inline.args.args):]
            for arg_fun, arg_call in zip(to_inline.args.args, values):
                # Unique temporary per (function, argument, call site).
                v_name = "__pythran_inline{}{}{}".format(
                    function_def.name, arg_fun.id, self.call_count)
                new_var = ast.Name(id=v_name, ctx=ast.Store(),
                                   annotation=None)
                self.defs.append(ast.Assign(targets=[new_var],
                                            value=arg_call))
                arg_to_value[arg_fun.id] = ast.Name(id=v_name,
                                                    ctx=ast.Load(),
                                                    annotation=None)
            self.call_count += 1
            return Inliner(arg_to_value).visit(to_inline.body[0])
    return node
Replace a function call by the inlined function's body .
43,818
def size_container_folding(value):
    """Convert a container value to an ast expression if not too big.

    Lists, tuples, sets and dicts are folded element-wise; ndarrays
    become a ``numpy.array(...)`` call with an explicit dtype.

    Raises ConversionError for unsupported types and ToNotEval for
    containers of MAX_LEN elements or more.
    """
    if len(value) < MAX_LEN:
        if isinstance(value, list):
            return ast.List([to_ast(elt) for elt in value], ast.Load())
        elif isinstance(value, tuple):
            return ast.Tuple([to_ast(elt) for elt in value], ast.Load())
        elif isinstance(value, set):
            return ast.Set([to_ast(elt) for elt in value])
        elif isinstance(value, dict):
            keys = [to_ast(elt) for elt in value.keys()]
            values = [to_ast(elt) for elt in value.values()]
            return ast.Dict(keys, values)
        elif isinstance(value, np.ndarray):
            # Rebuild as numpy.array(<nested tuple>, <dtype attribute>).
            return ast.Call(
                func=ast.Attribute(
                    ast.Name(mangle('numpy'), ast.Load(), None),
                    'array',
                    ast.Load()),
                args=[to_ast(totuple(value.tolist())),
                      ast.Attribute(
                          ast.Name(mangle('numpy'), ast.Load(), None),
                          value.dtype.name,
                          ast.Load())],
                keywords=[])
        else:
            raise ConversionError()
    else:
        raise ToNotEval()
Convert value to ast expression if size is not too big .
43,819
def builtin_folding(value):
    """Convert a builtin function or constant to an ast expression.

    None/bool constants fold to their printed name; the bool/float/int
    builtins get a trailing underscore to match pythonic's naming.
    """
    if isinstance(value, (type(None), bool)):
        name = str(value)
    elif value.__name__ in ("bool", "float", "int"):
        name = value.__name__ + "_"
    else:
        name = value.__name__
    # NOTE(review): three-argument Name matches the gast node signature,
    # not stdlib ast -- confirm the project uses gast here.
    return ast.Attribute(ast.Name('__builtin__', ast.Load(), None),
                         name, ast.Load())
Convert builtin function to ast expression .
43,820
def to_ast(value):
    """Turn a constant value into an equivalent ast expression.

    Raises PythranSyntaxError for integers too large to fold, and
    ToNotEval when the value cannot (or should not) be folded.
    """
    if isinstance(value, (type(None), bool)):
        return builtin_folding(value)
    # Python 2 long literals cannot be represented portably.
    if sys.version_info[0] == 2 and isinstance(value, long):
        from pythran.syntax import PythranSyntaxError
        raise PythranSyntaxError("constant folding results in big int")
    if any(value is t for t in (bool, int, float)):
        # The value is one of the builtin *types* themselves.
        iinfo = np.iinfo(int)
        if isinstance(value, int) and not (iinfo.min <= value <= iinfo.max):
            from pythran.syntax import PythranSyntaxError
            raise PythranSyntaxError("constant folding results in big int")
        return builtin_folding(value)
    elif isinstance(value, np.generic):
        # Unwrap numpy scalars to plain Python scalars first.
        return to_ast(np.asscalar(value))
    elif isinstance(value, numbers.Number):
        return ast.Num(value)
    elif isinstance(value, str):
        return ast.Str(value)
    elif isinstance(value, (list, tuple, set, dict, np.ndarray)):
        return size_container_folding(value)
    elif hasattr(value, "__module__") and value.__module__ == "__builtin__":
        return builtin_folding(value)
    elif sys.version_info.major == 3:
        # Lazy builtins are folded through their materialized content.
        if isinstance(value, (filter, map, zip)):
            return to_ast(list(value))
    raise ToNotEval()
Turn a value into ast expression .
43,821
def visit_Module(self, node):
    """Collect module-level bindings: each import/def defines a name."""
    duc = SilentDefUseChains()
    duc.visit(node)
    for d in duc.locals[node]:
        self.result[d.name()] = d.node
Import module define a new variable name .
43,822
def attr_to_path(node):
    """Compute the intrinsic path and final object for an attribute node.

    Walks the (possibly dotted) attribute chain through MODULES, then
    prefixes the C++-side 'pythonic' namespace; non-literal objects get
    a 'functor' component inserted before their own name.
    """
    def get_intrinsic_path(modules, attr):
        # Recursive descent: a Name is the root module, an Attribute
        # extends the path by one component.
        if isinstance(attr, ast.Name):
            return modules[demangle(attr.id)], (demangle(attr.id),)
        elif isinstance(attr, ast.Attribute):
            module, path = get_intrinsic_path(modules, attr.value)
            return module[attr.attr], path + (attr.attr,)
    obj, path = get_intrinsic_path(MODULES, node)
    if not obj.isliteral():
        path = path[:-1] + ('functor', path[-1])
    return obj, ('pythonic',) + path
Compute path and final object for an attribute node
43,823
def path_to_attr(path):
    """Transform a path tuple into a nested ast.Attribute expression."""
    # The first component is the (mangled) root name; every following
    # component wraps it in one more Attribute level.
    attr = ast.Name(mangle(path[0]), ast.Load(), None)
    for component in path[1:]:
        attr = ast.Attribute(attr, component, ast.Load())
    return attr
Transform path to ast . Attribute .
43,824
def get_variable(assignable):
    """Return the underlying variable of an assignable expression.

    Peels off subscripts and attribute-call wrappers until the plain
    target remains.
    """
    msg = "Only name and subscript can be assigned."
    assert isinstance(assignable, (ast.Name, ast.Subscript)), msg
    target = assignable
    while isinstance(target, ast.Subscript) or isattr(target):
        target = target.args[0] if isattr(target) else target.value
    return target
Return modified variable name .
43,825
def prepare(self, node):
    """Format type dependency information to use it for reordering.

    Breaks dependency cycles: starting from the no-dependency roots,
    any predecessor edge that is also reachable forward from the node
    is removed.
    """
    super(Reorder, self).prepare(node)
    candidates = self.type_dependencies.successors(TypeDependencies.NoDeps)
    while candidates:
        new_candidates = list()
        for n in candidates:
            # Remove back-edges that would close a cycle through n.
            for p in list(self.type_dependencies.predecessors(n)):
                if nx.has_path(self.type_dependencies, n, p):
                    self.type_dependencies.remove_edge(p, n)
            # Do not follow self-loops.
            if n not in self.type_dependencies.successors(n):
                new_candidates.extend(self.type_dependencies.successors(n))
        candidates = new_candidates
Format type dependency information to use it for reordering .
43,826
def visit_Module(self, node):
    """Keep everything but function definitions, then add sorted functions.

    Functions are topologically sorted along type dependencies so that
    callees come before callers; an unfeasible sort means infinite
    recursion.
    """
    newbody = list()
    olddef = list()
    for stmt in node.body:
        if isinstance(stmt, ast.FunctionDef):
            olddef.append(stmt)
        else:
            newbody.append(stmt)
    try:
        newdef = topological_sort(self.type_dependencies,
                                  self.ordered_global_declarations)
        newdef = [f for f in newdef if isinstance(f, ast.FunctionDef)]
    except nx.exception.NetworkXUnfeasible:
        raise PythranSyntaxError("Infinite function recursion")
    # Sorting must only reorder, never drop or invent functions.
    assert set(newdef) == set(olddef), "A function have been lost..."
    node.body = newbody + newdef
    self.update = True
    return node
Keep everything but function definition then add sorted functions .
43,827
def visit(self, node):
    """Carry OMP directives from the old node over to the new one.

    Dead-code elimination may rebuild nodes; their OpenMP metadata
    must survive the rewrite.
    """
    old_omp = metadata.get(node, OMPDirective)
    node = super(DeadCodeElimination, self).visit(node)
    # Only re-attach when the rewritten node lost its directives.
    if not metadata.get(node, OMPDirective):
        for omp_directive in old_omp:
            metadata.add(node, omp_directive)
    return node
Add OMPDirective from the old node to the new one .
43,828
def save_intrinsic_alias(module):
    """Recursively save default aliases for pythonic functions.

    Each intrinsic aliases only to itself; Class fields are walked too.
    """
    for v in module.values():
        if isinstance(v, dict):  # a submodule: recurse
            save_intrinsic_alias(v)
        else:
            IntrinsicAliases[v] = frozenset((v,))
            if isinstance(v, Class):
                save_intrinsic_alias(v.fields)
Recursively save default aliases for pythonic functions .
43,829
def visit_IfExp(self, node):
    """The conditional expression aliases to either of its branches."""
    # The test contributes no aliases, but must still be visited.
    self.visit(node.test)
    body_aliases = self.visit(node.body)
    orelse_aliases = self.visit(node.orelse)
    return self.add(node, set.union(body_aliases, orelse_aliases))
Resulting node alias to either branch
43,830
def visit_Dict(self, node):
    """A dict is abstracted as an unordered container of its values.

    Keys are visited for side effects only; ``None`` marks an empty
    (unknown-content) container.
    """
    if node.keys:
        elts_aliases = set()
        for key, val in zip(node.keys, node.values):
            self.visit(key)  # key aliasing is not tracked
            elt_aliases = self.visit(val)
            elts_aliases.update(map(ContainerOf, elt_aliases))
    else:
        elts_aliases = None
    return self.add(node, elts_aliases)
A dict is abstracted as an unordered container of its values
43,831
def visit_Set(self, node):
    """A set is abstracted as an unordered container of its elements."""
    if node.elts:
        elts_aliases = {ContainerOf(alias)
                        for elt in node.elts
                        for alias in self.visit(elt)}
    else:
        # Empty set: nothing is known about its (future) content.
        elts_aliases = None
    return self.add(node, elts_aliases)
A set is abstracted as an unordered container of its elements
43,832
def visit_Return(self, node):
    """Accumulate the aliases of every returned expression.

    Side effect: updates the special RetId entry of the current
    function's alias table, later used to build ``return_alias``.
    """
    if not node.value:
        return  # bare ``return``: nothing to record
    ret_aliases = self.visit(node.value)
    if Aliases.RetId in self.aliases:
        ret_aliases = ret_aliases.union(self.aliases[Aliases.RetId])
    self.aliases[Aliases.RetId] = ret_aliases
A side effect of computing aliases on a Return is that it updates the return_alias field of current function
43,833
def visit_Subscript(self, node):
    """Compute the aliases of a subscript expression.

    For an Index on a known container the containees are propagated
    (filtered by index when both indices are known); a subscript of a
    function parameter keeps a symbolic Subscript alias; anything else
    is unknown (None).
    """
    if isinstance(node.slice, ast.Index):
        aliases = set()
        self.visit(node.slice)
        value_aliases = self.visit(node.value)
        for alias in value_aliases:
            if isinstance(alias, ContainerOf):
                # A slice extracts several elements: give up on it.
                if isinstance(node.slice.value, ast.Slice):
                    continue
                # A constant index must match the container's index.
                if isinstance(node.slice.value, ast.Num):
                    if node.slice.value.n != alias.index:
                        continue
                aliases.add(alias.containee)
            elif isinstance(getattr(alias, 'ctx', None), ast.Param):
                aliases.add(ast.Subscript(alias, node.slice, node.ctx))
    else:
        aliases = None
    self.generic_visit(node)
    return self.add(node, aliases)
The resulting node's aliases store the subscript relationship if we don't know anything about the subscripted node .
43,834
def visit_Tuple(self, node):
    """A tuple is abstracted as an ordered container of its values.

    Unlike sets and dicts, the element position is recorded in the
    ContainerOf wrapper.
    """
    if node.elts:
        elts_aliases = set()
        for i, elt in enumerate(node.elts):
            elt_aliases = self.visit(elt)
            elts_aliases.update(ContainerOf(alias, i)
                                for alias in elt_aliases)
    else:
        elts_aliases = None
    return self.add(node, elts_aliases)
A tuple is abstracted as an ordered container of its values
43,835
def visit_ListComp(self, node):
    """A comprehension is not abstracted in any way.

    Generators and the element expression are visited for their side
    effects; the comprehension itself gets a fresh (unknown) alias set.
    """
    for generator in node.generators:
        self.visit_comprehension(generator)
    self.visit(node.elt)
    return self.add(node)
A comprehension is not abstracted in any way
43,836
def visit_FunctionDef(self, node):
    """Initialise aliasing with default values before visiting.

    After the body is visited, every alias recorded for the return
    value is turned into a function of the call-site arguments and
    stored as ``node.return_alias``.
    """
    self.aliases = IntrinsicAliases.copy()
    self.aliases.update((f.name, {f})
                        for f in self.global_declarations.values())
    self.aliases.update((arg.id, {arg}) for arg in node.args.args)
    self.generic_visit(node)
    if Aliases.RetId in self.aliases:
        # parametrize() turns a return-alias expression into a function
        # of the call-site argument list.
        def parametrize(exp):
            if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)):
                return lambda _: {exp}
            elif isinstance(exp, ContainerOf):
                pcontainee = parametrize(exp.containee)
                index = exp.index
                return lambda args: {
                    ContainerOf(pc, index)
                    for pc in pcontainee(args)
                }
            elif isinstance(exp, ast.Name):
                try:
                    w = node.args.args.index(exp)

                    def return_alias(args):
                        if w < len(args):
                            return {args[w]}
                        else:
                            # Call site omitted this argument: alias to
                            # the matching default value.
                            return {node.args.defaults[w - len(args)]}
                    return return_alias
                except ValueError:
                    # Not a parameter: nothing usable at the call site.
                    return lambda _: self.get_unbound_value_set()
            elif isinstance(exp, ast.Subscript):
                values = parametrize(exp.value)
                slices = parametrize(exp.slice)
                return lambda args: {
                    ast.Subscript(value, slice, ast.Load())
                    for value in values(args)
                    for slice in slices(args)
                }
            else:
                return lambda _: self.get_unbound_value_set()

        return_aliases = [parametrize(ret_alias)
                          for ret_alias in self.aliases[Aliases.RetId]]

        def merge_return_aliases(args):
            # Union of the aliases produced by every recorded return.
            merged_return_aliases = set()
            for return_alias in return_aliases:
                merged_return_aliases.update(return_alias(args))
            return merged_return_aliases

        node.return_alias = merge_return_aliases
Initialise aliasing default value before visiting .
43,837
def visit_For(self, node):
    """A for loop aliases its target to the content of the iterator.

    When every alias of the iterator is a known container the target
    aliases the containees; otherwise the target only aliases itself.
    """
    iter_aliases = self.visit(node.iter)
    if all(isinstance(x, ContainerOf) for x in iter_aliases):
        target_aliases = set()
        for iter_alias in iter_aliases:
            target_aliases.add(iter_alias.containee)
    else:
        target_aliases = {node.target}
    self.add(node.target, target_aliases)
    self.aliases[node.target.id] = self.result[node.target]
    # The body is visited twice: aliasing established by the first pass
    # may change the outcome of the second (loop fixpoint).
    # NOTE(review): the duplicated call looks deliberate -- confirm.
    self.generic_visit(node)
    self.generic_visit(node)
For loop creates aliasing between the target and the content of the iterator
43,838
def prepare(self, node):
    """Initialise argument effects: this analysis is inter-procedural.

    A FunctionEffects record is created for every user-defined function
    and for every intrinsic of every known module.
    """
    super(ArgumentReadOnce, self).prepare(node)
    for n in self.global_declarations.values():
        fe = ArgumentReadOnce.FunctionEffects(n)
        self.node_to_functioneffect[n] = fe
        self.result.add(fe)

    def save_effect(module):
        # Recurse through submodules and class fields.
        for intr in module.values():
            if isinstance(intr, dict):
                save_effect(intr)
            else:
                fe = ArgumentReadOnce.FunctionEffects(intr)
                self.node_to_functioneffect[intr] = fe
                self.result.add(fe)
                if isinstance(intr, intrinsic.Class):
                    save_effect(intr.fields)

    for module in MODULES.values():
        save_effect(module)
Initialise argument effects , as this analysis is inter-procedural .
43,839
def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Convert a list of regions to a DS9 region string."""
    return to_shape_list(regions, coordsys).to_ds9(coordsys, fmt, radunit)
Converts a list of ~regions . Region to DS9 region string .
43,840
def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Serialize regions to a DS9 string and write it to *filename*."""
    serialized = ds9_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as region_file:
        region_file.write(serialized)
Converts a list of ~regions . Region to DS9 string and write to file .
43,841
def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Convert a list of regions to a CRTF region string."""
    return to_shape_list(regions, coordsys).to_crtf(coordsys, fmt, radunit)
Converts a list of ~regions . Region to CRTF region string .
43,842
def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Serialize regions to a CRTF string and write it to *filename*."""
    serialized = crtf_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as region_file:
        region_file.write(serialized)
Converts a list of ~regions . Region to CRTF string and write to file .
43,843
def corners(self):
    """Return the (x, y) coordinate pairs of the four rotated corners."""
    half_w = self.width / 2
    half_h = self.height / 2
    # Corners of the unrotated rectangle, centered at the origin,
    # listed counter-clockwise from the lower-left one.
    base = [
        (-half_w, -half_h),
        (half_w, -half_h),
        (half_w, half_h),
        (-half_w, half_h),
    ]
    cos_a = np.cos(self.angle)
    sin_a = np.sin(self.angle)
    rotation = [[cos_a, sin_a], [-sin_a, cos_a]]
    center = np.array([self.center.x, self.center.y])
    return np.dot(base, rotation) + center
Return the x y coordinate pairs that define the corners
43,844
def to_polygon(self):
    """Return a 4-cornered polygon region equivalent to this rectangle."""
    xs, ys = self.corners.T
    polygon_vertices = PixCoord(x=xs, y=ys)
    return PolygonPixelRegion(vertices=polygon_vertices,
                              meta=self.meta, visual=self.visual)
Return a 4 - cornered polygon equivalent to this rectangle
43,845
def _lower_left_xy ( self ) : hw = self . width / 2. hh = self . height / 2. sint = np . sin ( self . angle ) cost = np . cos ( self . angle ) dx = ( hh * sint ) - ( hw * cost ) dy = - ( hh * cost ) - ( hw * sint ) x = self . center . x + dx y = self . center . y + dy return x , y
Compute lower left xy position .
43,846
def _make_annulus_path(patch_inner, patch_outer):
    """Define a matplotlib annulus path from two patches.

    The inner boundary is traversed in the opposite direction so the
    fill rule leaves the hole empty.
    """
    import matplotlib.path as mpath
    path_inner = patch_inner.get_path()
    transform_inner = patch_inner.get_transform()
    path_inner = transform_inner.transform_path(path_inner)
    path_outer = patch_outer.get_path()
    transform_outer = patch_outer.get_transform()
    path_outer = transform_outer.transform_path(path_outer)
    # Reverse the inner vertices (dropping the closing point), then
    # repeat the last vertex so vertex and code counts line up.
    verts_inner = path_inner.vertices[:-1][::-1]
    verts_inner = np.concatenate((verts_inner, [verts_inner[-1]]))
    verts = np.vstack((path_outer.vertices, verts_inner))
    codes = np.hstack((path_outer.codes, path_inner.codes))
    return mpath.Path(verts, codes)
Defines a matplotlib annulus path from two patches .
43,847
def read_fits_region(filename, errors='strict'):
    """Read a FITS file and convert every region table to Region objects.

    Parameters
    ----------
    filename : str
        Path of the FITS file to scan.
    errors : str
        Error policy forwarded to FITSRegionParser.

    Returns
    -------
    list
        Sky regions parsed from every HDU named 'REGION'.
    """
    regions = []
    # Use a context manager so the file handle is always closed; the
    # original called fits.open() and never closed the HDU list.
    with fits.open(filename) as hdul:
        for hdu in hdul:
            if hdu.name == 'REGION':
                table = Table.read(hdu)
                wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel'])
                regions_list = FITSRegionParser(
                    table, errors).shapes.to_regions()
                for reg in regions_list:
                    regions.append(reg.to_sky(wcs))
    return regions
Reads a FITS region file and scans for any fits regions table and converts them into Region objects .
43,848
def to_shape_list(region_list, coordinate_system='fk5'):
    """Convert a list of regions into a regions.ShapeList object.

    Each region is reduced to (coordsys, type, flat coordinate list,
    meta): sky coordinates become lon/lat Angle pairs, pixel
    coordinates become dimensionless Quantity pairs.
    """
    shape_list = ShapeList()
    for region in region_list:
        coord = []
        # Strip the 'SkyRegion' / 'PixelRegion' class-name suffix to
        # get the shape type.
        if isinstance(region, SkyRegion):
            reg_type = region.__class__.__name__[:-9].lower()
        else:
            reg_type = region.__class__.__name__[:-11].lower()
        for val in regions_attributes[reg_type]:
            coord.append(getattr(region, val))
        if reg_type == 'polygon':
            # Polygons are described by all their vertices instead.
            coord = [x for x in region.vertices]
        if coordinate_system:
            coordsys = coordinate_system
        else:
            if isinstance(region, SkyRegion):
                coordsys = coord[0].name
            else:
                coordsys = 'image'
        frame = coordinates.frame_transform_graph.lookup_name(coordsys)
        new_coord = []
        for val in coord:
            if isinstance(val, Angle) or isinstance(val, u.Quantity) or isinstance(val, numbers.Number):
                new_coord.append(val)
            elif isinstance(val, PixCoord):
                new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled))
                new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled))
            else:
                # A SkyCoord: expand to (lon, lat) in the target frame.
                new_coord.append(Angle(val.transform_to(frame).spherical.lon))
                new_coord.append(Angle(val.transform_to(frame).spherical.lat))
        meta = dict(region.meta)
        meta.update(region.visual)
        if reg_type == 'text':
            # 'text' wins; otherwise promote a 'label' entry.
            meta['text'] = meta.get('text', meta.pop('label', ''))
        include = region.meta.pop('include', True)
        shape_list.append(Shape(coordsys, reg_type, new_coord, meta,
                                False, include))
    return shape_list
Converts a list of regions into a regions . ShapeList object .
43,849
def to_ds9_meta(shape_meta):
    """Filter and rename meta entries so they are DS9 compatible."""
    valid_keys = [
        # generic region attributes
        'symbol', 'include', 'tag', 'line', 'comment', 'name', 'select',
        'highlite', 'fixed', 'label', 'text', 'edit', 'move', 'rotate',
        'delete', 'source', 'background',
        # visual attributes
        'color', 'dash', 'linewidth', 'font', 'dashlist', 'fill',
        'textangle', 'symsize',
    ]
    key_mappings = {'symbol': 'point', 'linewidth': 'width',
                    'label': 'text'}
    meta = _to_io_meta(shape_meta, valid_keys, key_mappings)
    if 'font' in meta:
        # DS9 fonts carry size, style and weight after the family name.
        font_suffix = " {0} {1} {2}".format(
            shape_meta.get('fontsize', 12),
            shape_meta.get('fontstyle', 'normal'),
            shape_meta.get('fontweight', 'roman'))
        meta['font'] += font_suffix
    return meta
Makes the meta data DS9 compatible by filtering and mapping the valid keys
43,850
def _to_io_meta ( shape_meta , valid_keys , key_mappings ) : meta = dict ( ) for key in shape_meta : if key in valid_keys : meta [ key_mappings . get ( key , key ) ] = shape_meta [ key ] return meta
This is used to make meta data compatible with a specific I/O format by filtering and mapping to its valid keys .
43,851
def convert_coords(self):
    """Process the raw coordinate list into region coordinates.

    Dispatches on the coordinate system, then applies per-type
    post-processing (line endpoints, text label).
    """
    if self.coordsys in ['image', 'physical']:
        coords = self._convert_pix_coords()
    else:
        coords = self._convert_sky_coords()
    if self.region_type == 'line':
        # A line is its two endpoints, not a coordinate+size pair.
        coords = [coords[0][0], coords[0][1]]
    if self.region_type == 'text':
        coords.append(self.meta['text'])
    return coords
Process list of coordinates
43,852
def _convert_sky_coords(self):
    """Convert raw angle pairs to a SkyCoord plus remaining quantities.

    Consecutive (Angle, Angle) pairs in ``self.coord`` become one
    SkyCoord; for non-polygon regions any trailing scalar values
    (radii, widths, ...) are appended unchanged.
    """
    parsed_angles = [(x, y)
                     for x, y in zip(self.coord[:-1:2], self.coord[1::2])
                     if (isinstance(x, coordinates.Angle) and
                         isinstance(y, coordinates.Angle))]
    frame = coordinates.frame_transform_graph.lookup_name(self.coordsys)
    lon, lat = zip(*parsed_angles)
    if (hasattr(lon, '__len__') and hasattr(lat, '__len__') and
            len(lon) == 1 and len(lat) == 1):
        # Single point: unwrap so the SkyCoord is scalar, not length-1.
        lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0])
    else:
        lon, lat = u.Quantity(lon), u.Quantity(lat)
    sphcoords = coordinates.UnitSphericalRepresentation(lon, lat)
    coords = [SkyCoord(frame(sphcoords))]
    if self.region_type != 'polygon':
        # Skip the consumed angle pair; keep the trailing values.
        coords += self.coord[len(coords * 2):]
    return coords
Convert to sky coordinates
43,853
def _convert_pix_coords(self):
    """Convert the raw coordinate list to PixCoord-based coordinates."""
    if self.region_type in ['polygon', 'line']:
        # Even indices are x values, odd indices are y values.
        coords = [PixCoord(self.coord[0::2], self.coord[1::2])]
    else:
        temp = [_.value for _ in self.coord]
        coord = PixCoord(temp[0], temp[1])
        coords = [coord] + temp[2:]
    # An even count for ellipse/rectangle means the last value is an
    # angle: keep the original quantity rather than the bare value.
    if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0:
        coords[-1] = self.coord[-1]
    return coords
Convert to pixel coordinates regions . PixCoord
43,854
def to_region(self):
    """Convert this shape to a regions.Region object.

    The first converted coordinate decides between the sky and pixel
    region constructors; meta entries are split between visual keys
    and plain metadata.
    """
    coords = self.convert_coords()
    log.debug(coords)
    viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font',
                    'symsize', 'symbol', 'symsize', 'fontsize',
                    'fontstyle', 'usetex', 'labelpos', 'labeloff',
                    'linewidth', 'linestyle', 'point', 'textangle',
                    'fontweight']
    if isinstance(coords[0], SkyCoord):
        reg = self.shape_to_sky_region[self.region_type](*coords)
    elif isinstance(coords[0], PixCoord):
        reg = self.shape_to_pixel_region[self.region_type](*coords)
    else:
        self._raise_error("No central coordinate")
    reg.visual = RegionVisual()
    reg.meta = RegionMeta()
    # 'text' wins over 'label' when both are present.
    label = self.meta.get('text', self.meta.get('label', ""))
    if label != '':
        reg.meta['label'] = label
    for key in self.meta:
        if key in viz_keywords:
            reg.visual[key] = self.meta[key]
        else:
            reg.meta[key] = self.meta[key]
    reg.meta['include'] = self.include
    return reg
Converts to region regions . Region object
43,855
def check_crtf(self):
    """Check this shape for CRTF compatibility.

    Raises
    ------
    ValueError
        If the region type or coordinate frame is not supported by CRTF.
    """
    if self.region_type not in regions_attributes:
        # Fix: the two adjacent string literals were missing a
        # separating space ("...packagesupported by CRTF").
        raise ValueError("'{0}' is not a valid region type in this package "
                         "supported by CRTF".format(self.region_type))
    if self.coordsys not in valid_coordsys['CRTF']:
        raise ValueError("'{0}' is not a valid coordinate reference frame in "
                         "astropy supported by CRTF".format(self.coordsys))
Checks for CRTF compatibility .
43,856
def check_ds9(self):
    """Check this shape for DS9 compatibility.

    Raises
    ------
    ValueError
        If the region type or coordinate frame is not supported by DS9.
    """
    if self.region_type not in regions_attributes:
        # Fix: the two adjacent string literals were missing a
        # separating space ("...packagesupported by DS9").
        raise ValueError("'{0}' is not a valid region type in this package "
                         "supported by DS9".format(self.region_type))
    if self.coordsys not in valid_coordsys['DS9']:
        raise ValueError("'{0}' is not a valid coordinate reference frame "
                         "in astropy supported by DS9".format(self.coordsys))
Checks for DS9 compatibility .
43,857
def _validate(self):
    """Check that the region type and coordinate frame are known."""
    if self.region_type not in regions_attributes:
        raise ValueError(
            "'{0}' is not a valid region type in this package".format(
                self.region_type))
    known_frames = valid_coordsys['DS9'] + valid_coordsys['CRTF']
    if self.coordsys not in known_frames:
        raise ValueError("'{0}' is not a valid coordinate reference frame "
                         "in astropy".format(self.coordsys))
Checks whether all the attributes of this object is valid .
43,858
def read_crtf(filename, errors='strict'):
    """Read a CRTF region file and return a list of region objects."""
    with open(filename) as region_file:
        # The first line must announce the CRTF format.
        if not regex_begin.search(region_file.readline()):
            raise CRTFRegionParserError('Every CRTF Region must start with "#CRTF" ')
        region_string = region_file.read()
    parser = CRTFParser(region_string, errors)
    return parser.shapes.to_regions()
Reads a CRTF region file and returns a list of region objects .
43,859
def parse_line(self, line):
    """Parse a single CRTF line.

    Blank lines and comments are skipped; global parameter lines
    update the shared meta; anything else must be a region definition.
    """
    # Skip blanks and comments.
    if line == '':
        return
    if regex_comment.search(line):
        return
    # A global definition line updates the shared meta and stops.
    global_parameters = regex_global.search(line)
    if global_parameters:
        self.parse_global_meta(global_parameters.group('parameters'))
        return
    crtf_line = regex_line.search(line)
    if crtf_line:
        region = regex_region.search(crtf_line.group('region'))
        type_ = region.group('type') or 'reg'
        include = region.group('include') or '+'
        region_type = region.group('regiontype').lower()
        if region_type in self.valid_definition:
            helper = CRTFRegionParser(self.global_meta, include, type_,
                                      region_type,
                                      *crtf_line.group('region',
                                                       'parameters'))
            self.shapes.append(helper.shape)
        else:
            self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type))
    else:
        self._raise_error("Not a valid CRTF line: '{0}'.".format(line))
    return
Parses a single line .
43,860
def parse(self):
    """Starting point to parse the CRTF region string.

    Order matters: meta conversion sets the coordinate system used by
    the coordinate conversion, which in turn feeds the final shape.
    """
    self.convert_meta()
    self.coordsys = self.meta.get('coord', 'image').lower()
    self.set_coordsys()
    self.convert_coordinates()
    self.make_shape()
Starting point to parse the CRTF region string .
43,861
def set_coordsys(self):
    """Map the parsed frame name to astropy's coordinate-system name.

    Unknown names are left untouched.
    """
    key = self.coordsys.lower()
    if key in self.coordsys_mapping:
        self.coordsys = self.coordsys_mapping[key]
Mapping to astropy s coordinate system name
43,862
def convert_coordinates ( self ) : coord_list_str = regex_coordinate . findall ( self . reg_str ) + regex_length . findall ( self . reg_str ) coord_list = [ ] if self . region_type == 'poly' : if len ( coord_list_str ) < 4 : self . _raise_error ( 'Not in proper format: {} polygon should have > 4 coordinates' . format ( self . reg_str ) ) if coord_list_str [ 0 ] != coord_list_str [ - 1 ] : self . _raise_error ( "Not in proper format: '{0}', " "In polygon, the last and first coordinates should be same" . format ( self . reg_str ) ) else : if len ( coord_list_str ) != len ( self . language_spec [ self . region_type ] ) : self . _raise_error ( "Not in proper format: '{0}', " "Does not contain expected number of parameters for the region '{1}'" . format ( self . reg_str , self . region_type ) ) for attr_spec , val_str in zip ( self . language_spec [ self . region_type ] , coord_list_str ) : if attr_spec == 'c' : if len ( val_str ) == 2 and val_str [ 1 ] != '' : coord_list . append ( CoordinateParser . parse_coordinate ( val_str [ 0 ] ) ) coord_list . append ( CoordinateParser . parse_coordinate ( val_str [ 1 ] ) ) else : self . _raise_error ( "Not in proper format: {0} should be a coordinate" . format ( val_str ) ) if attr_spec == 'pl' : if len ( val_str ) == 2 and val_str [ 1 ] != '' : coord_list . append ( CoordinateParser . parse_angular_length_quantity ( val_str [ 0 ] ) ) coord_list . append ( CoordinateParser . parse_angular_length_quantity ( val_str [ 1 ] ) ) else : self . _raise_error ( "Not in proper format: {0} should be a pair of length" . format ( val_str ) ) if attr_spec == 'l' : if isinstance ( val_str , six . string_types ) : coord_list . append ( CoordinateParser . parse_angular_length_quantity ( val_str ) ) else : self . _raise_error ( "Not in proper format: {0} should be a single length" . format ( val_str ) ) if attr_spec == 's' : if self . region_type == 'symbol' : if val_str in valid_symbols : self . meta [ 'symbol' ] = val_str else : self . 
_raise_error ( "Not in proper format: '{0}' should be a symbol" . format ( val_str ) ) elif self . region_type == 'text' : self . meta [ 'text' ] = val_str [ 1 : - 1 ] self . coord = coord_list
Convert coordinate string to ~astropy . coordinates . Angle or ~astropy . units . quantity . Quantity objects
43,863
def convert_meta ( self ) : if self . meta_str : self . meta_str = regex_meta . findall ( self . meta_str + ',' ) if self . meta_str : for par in self . meta_str : if par [ 0 ] is not '' : val1 = par [ 0 ] val2 = par [ 1 ] else : val1 = par [ 2 ] val2 = par [ 3 ] val1 = val1 . strip ( ) val2 = val2 . strip ( ) if val1 in CRTFParser . valid_global_keys or val1 == 'label' : if val1 in ( 'range' , 'corr' , 'labeloff' ) : val2 = val2 . split ( ',' ) val2 = [ x . strip ( ) for x in val2 ] self . meta [ val1 ] = val2 else : self . _raise_error ( "'{0}' is not a valid meta key" . format ( val1 ) ) self . meta [ 'include' ] = self . include != '-' self . include = self . meta [ 'include' ] if 'range' in self . meta : self . meta [ 'range' ] = [ u . Quantity ( x ) for x in self . meta [ 'range' ] ] self . meta [ 'type' ] = self . type_
Parses the meta_str to python dictionary and stores in meta attribute .
43,864
def fits_region_objects_to_table ( regions ) : for reg in regions : if isinstance ( reg , SkyRegion ) : raise TypeError ( 'Every region must be a pixel region' . format ( reg ) ) shape_list = to_shape_list ( regions , coordinate_system = 'image' ) return shape_list . to_fits ( )
Converts list of regions to FITS region table .
43,865
def write_fits_region ( filename , regions , header = None ) : output = fits_region_objects_to_table ( regions ) bin_table = fits . BinTableHDU ( data = output , header = header ) bin_table . writeto ( filename )
Converts list of regions to FITS region table and write to a file .
43,866
def make_example_dataset ( data = 'simulated' , config = None ) : if data == 'simulated' : return ExampleDatasetSimulated ( config = config ) elif data == 'fermi' : return ExampleDatasetFermi ( config = config ) else : raise ValueError ( 'Invalid selection data: {}' . format ( data ) )
Make example dataset .
43,867
def _table_to_bintable ( table ) : data = table . as_array ( ) header = fits . Header ( ) header . update ( table . meta ) name = table . meta . pop ( 'name' , None ) return fits . BinTableHDU ( data , header , name = name )
Convert ~astropy . table . Table to astropy . io . fits . BinTable .
43,868
def read_ds9 ( filename , errors = 'strict' ) : with open ( filename ) as fh : region_string = fh . read ( ) parser = DS9Parser ( region_string , errors = errors ) return parser . shapes . to_regions ( )
Read a DS9 region file in as a list of ~regions . Region objects .
43,869
def set_coordsys ( self , coordsys ) : if coordsys in self . coordsys_mapping : self . coordsys = self . coordsys_mapping [ coordsys ] else : self . coordsys = coordsys
Transform coordinate system
43,870
def run ( self ) : for line_ in self . region_string . split ( '\n' ) : for line in line_ . split ( ";" ) : self . parse_line ( line ) log . debug ( 'Global state: {}' . format ( self ) )
Run all steps
43,871
def parse_meta ( meta_str ) : keys_vals = [ ( x , y ) for x , _ , y in regex_meta . findall ( meta_str . strip ( ) ) ] extra_text = regex_meta . split ( meta_str . strip ( ) ) [ - 1 ] result = OrderedDict ( ) for key , val in keys_vals : val = val . strip ( ) . strip ( "'" ) . strip ( '"' ) if key == 'text' : val = val . lstrip ( "{" ) . rstrip ( "}" ) if key in result : if key == 'tag' : result [ key ] . append ( val ) else : raise ValueError ( "Duplicate key {0} found" . format ( key ) ) else : if key == 'tag' : result [ key ] = [ val ] else : result [ key ] = val if extra_text : result [ 'comment' ] = extra_text return result
Parse the metadata for a single ds9 region string .
43,872
def parse_region ( self , include , region_type , region_end , line ) : if self . coordsys is None : raise DS9RegionParserError ( "No coordinate system specified and a" " region has been found." ) else : helper = DS9RegionParser ( coordsys = self . coordsys , include = include , region_type = region_type , region_end = region_end , global_meta = self . global_meta , line = line ) helper . parse ( ) self . shapes . append ( helper . shape )
Extract a Shape from a region string
43,873
def parse ( self ) : log . debug ( self ) self . parse_composite ( ) self . split_line ( ) self . convert_coordinates ( ) self . convert_meta ( ) self . make_shape ( ) log . debug ( self )
Convert line to shape object
43,874
def split_line ( self ) : hash_or_end = self . line . find ( "#" ) temp = self . line [ self . region_end : hash_or_end ] . strip ( " |" ) self . coord_str = regex_paren . sub ( "" , temp ) if hash_or_end >= 0 : self . meta_str = self . line [ hash_or_end : ] else : self . meta_str = ""
Split line into coordinates and meta string
43,875
def convert_coordinates ( self ) : coord_list = [ ] elements = [ x for x in regex_splitter . split ( self . coord_str ) if x ] element_parsers = self . language_spec [ self . region_type ] for ii , ( element , element_parser ) in enumerate ( zip ( elements , element_parsers ) ) : if element_parser is coordinate : unit = self . coordinate_units [ self . coordsys ] [ ii % 2 ] coord_list . append ( element_parser ( element , unit ) ) elif self . coordinate_units [ self . coordsys ] [ 0 ] is u . dimensionless_unscaled : coord_list . append ( element_parser ( element , unit = u . dimensionless_unscaled ) ) else : coord_list . append ( element_parser ( element ) ) if self . region_type in [ 'ellipse' , 'box' ] and len ( coord_list ) % 2 == 1 : coord_list [ - 1 ] = CoordinateParser . parse_angular_length_quantity ( elements [ len ( coord_list ) - 1 ] ) if self . region_type in [ 'ellipse' , 'annulus' ] : self . language_spec [ self . region_type ] = itertools . chain ( ( coordinate , coordinate ) , itertools . cycle ( ( radius , ) ) ) self . coord = coord_list
Convert coordinate string to objects
43,876
def convert_meta ( self ) : meta_ = DS9Parser . parse_meta ( self . meta_str ) self . meta = copy . deepcopy ( self . global_meta ) self . meta . update ( meta_ ) self . include = self . meta . get ( 'include' , True ) if self . include == '' else self . include != '-' self . meta [ 'include' ] = self . include
Convert meta string to dict
43,877
def _validate ( val , name , expected = 'any' ) : if not isinstance ( val , PixCoord ) : raise TypeError ( '{} must be a PixCoord' . format ( name ) ) if expected == 'any' : pass elif expected == 'scalar' : if not val . isscalar : raise ValueError ( '{} must be a scalar PixCoord' . format ( name ) ) elif expected == 'not scalar' : if val . isscalar : raise ValueError ( '{} must be a non-scalar PixCoord' . format ( name ) ) else : raise ValueError ( 'Invalid argument for `expected`: {}' . format ( expected ) ) return val
Validate that a given object is an appropriate PixCoord .
43,878
def to_sky ( self , wcs , origin = _DEFAULT_WCS_ORIGIN , mode = _DEFAULT_WCS_MODE ) : return SkyCoord . from_pixel ( xp = self . x , yp = self . y , wcs = wcs , origin = origin , mode = mode , )
Convert this PixCoord to ~astropy . coordinates . SkyCoord .
43,879
def from_sky ( cls , skycoord , wcs , origin = _DEFAULT_WCS_ORIGIN , mode = _DEFAULT_WCS_MODE ) : x , y = skycoord . to_pixel ( wcs = wcs , origin = origin , mode = mode ) return cls ( x = x , y = y )
Create PixCoord from ~astropy . coordinates . SkyCoord .
43,880
def separation ( self , other ) : r dx = other . x - self . x dy = other . y - self . y return np . hypot ( dx , dy )
r Separation to another pixel coordinate .
43,881
def skycoord_to_pixel_scale_angle ( skycoord , wcs , small_offset = 1 * u . arcsec ) : x , y = skycoord_to_pixel ( skycoord , wcs , mode = skycoord_to_pixel_mode ) pixcoord = PixCoord ( x = x , y = y ) r_old = skycoord . represent_as ( 'unitspherical' ) dlat = small_offset r_new = UnitSphericalRepresentation ( r_old . lon , r_old . lat + dlat ) coords_offset = skycoord . realize_frame ( r_new ) x_offset , y_offset = skycoord_to_pixel ( coords_offset , wcs , mode = skycoord_to_pixel_mode ) dx = x_offset - x dy = y_offset - y scale = np . hypot ( dx , dy ) / dlat . to ( 'degree' ) . value angle = np . arctan2 ( dy , dx ) * u . radian return pixcoord , scale , angle
Convert a set of SkyCoord coordinates into pixel coordinates pixel scales and position angles .
43,882
def assert_angle ( name , q ) : if isinstance ( q , u . Quantity ) : if q . unit . physical_type == 'angle' : pass else : raise ValueError ( "{0} should have angular units" . format ( name ) ) else : raise TypeError ( "{0} should be a Quantity instance" . format ( name ) )
Check that q is an angular ~astropy . units . Quantity .
43,883
def _silence ( ) : old_stdout = sys . stdout old_stderr = sys . stderr sys . stdout = _DummyFile ( ) sys . stderr = _DummyFile ( ) exception_occurred = False try : yield except : exception_occurred = True sys . stdout = old_stdout sys . stderr = old_stderr raise if not exception_occurred : sys . stdout = old_stdout sys . stderr = old_stderr
A context manager that silences sys . stdout and sys . stderr .
43,884
def use_astropy_helpers ( ** kwargs ) : global BOOTSTRAPPER config = BOOTSTRAPPER . config config . update ( ** kwargs ) BOOTSTRAPPER = _Bootstrapper ( ** config ) BOOTSTRAPPER . run ( )
Ensure that the astropy_helpers module is available and is importable . This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule or will download it from PyPI if necessary .
43,885
def config ( self ) : return dict ( ( optname , getattr ( self , optname ) ) for optname , _ in CFG_OPTIONS if hasattr ( self , optname ) )
A dict containing the options this _Bootstrapper was configured with .
43,886
def get_local_directory_dist ( self ) : if not os . path . isdir ( self . path ) : return log . info ( 'Attempting to import astropy_helpers from {0} {1!r}' . format ( 'submodule' if self . is_submodule else 'directory' , self . path ) ) dist = self . _directory_import ( ) if dist is None : log . warn ( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.' . format ( self . path , PACKAGE_NAME ) ) elif self . auto_upgrade and not self . is_submodule : upgrade = self . _do_upgrade ( dist ) if upgrade is not None : dist = upgrade return dist
Handle importing a vendored package from a subdirectory of the source distribution .
43,887
def get_local_file_dist ( self ) : if not os . path . isfile ( self . path ) : return log . info ( 'Attempting to unpack and import astropy_helpers from ' '{0!r}' . format ( self . path ) ) try : dist = self . _do_download ( find_links = [ self . path ] ) except Exception as e : if DEBUG : raise log . warn ( 'Failed to import {0} from the specified archive {1!r}: ' '{2}' . format ( PACKAGE_NAME , self . path , str ( e ) ) ) dist = None if dist is not None and self . auto_upgrade : upgrade = self . _do_upgrade ( dist ) if upgrade is not None : dist = upgrade return dist
Handle importing from a source archive ; this also uses setup_requires but points easy_install directly to the source archive .
43,888
def _directory_import ( self ) : path = os . path . abspath ( self . path ) ws = pkg_resources . WorkingSet ( [ ] ) ws . add_entry ( path ) dist = ws . by_key . get ( DIST_NAME ) if dist is None : setup_py = os . path . join ( path , 'setup.py' ) if os . path . isfile ( setup_py ) : sp . check_output ( [ sys . executable , 'setup.py' , 'egg_info' ] , cwd = path ) for dist in pkg_resources . find_distributions ( path , True ) : return dist return dist
Import astropy_helpers from the given path which will be added to sys . path .
43,889
def _check_submodule ( self ) : if ( self . path is None or ( os . path . exists ( self . path ) and not os . path . isdir ( self . path ) ) ) : return False if self . use_git : return self . _check_submodule_using_git ( ) else : return self . _check_submodule_no_git ( )
Check if the given path is a git submodule .
43,890
def sdot ( U , V ) : nu = U . ndim return np . tensordot ( U , V , axes = ( nu - 1 , 0 ) )
Computes the tensorproduct reducing last dimensoin of U with first dimension of V . For matrices it is equal to regular matrix product .
43,891
def set_values ( self , x ) : x = numpy . atleast_2d ( x ) x = x . real C_inv = self . __C_inv__ theta = numpy . dot ( x , C_inv ) self . theta = theta return theta
Updates self . theta parameter . No returns values
43,892
def tauchen ( N , mu , rho , sigma , m = 2 ) : Z = np . zeros ( ( N , 1 ) ) Zprob = np . zeros ( ( N , N ) ) a = ( 1 - rho ) * mu Z [ - 1 ] = m * math . sqrt ( sigma ** 2 / ( 1 - ( rho ** 2 ) ) ) Z [ 0 ] = - 1 * Z [ - 1 ] zstep = ( Z [ - 1 ] - Z [ 0 ] ) / ( N - 1 ) for i in range ( 1 , N ) : Z [ i ] = Z [ 0 ] + zstep * ( i ) Z = Z + a / ( 1 - rho ) for j in range ( 0 , N ) : for k in range ( 0 , N ) : if k == 0 : Zprob [ j , k ] = sp . stats . norm . cdf ( ( Z [ 0 ] - a - rho * Z [ j ] + zstep / 2 ) / sigma ) elif k == ( N - 1 ) : Zprob [ j , k ] = 1 - sp . stats . norm . cdf ( ( Z [ - 1 ] - a - rho * Z [ j ] - zstep / 2 ) / sigma ) else : up = sp . stats . norm . cdf ( ( Z [ k ] - a - rho * Z [ j ] + zstep / 2 ) / sigma ) down = sp . stats . norm . cdf ( ( Z [ k ] - a - rho * Z [ j ] - zstep / 2 ) / sigma ) Zprob [ j , k ] = up - down return ( ( Z , Zprob ) )
Approximate an AR1 process by a finite markov chain using Tauchen s method .
43,893
def rouwenhorst ( rho , sigma , N ) : from numpy import sqrt , linspace , array , zeros sigma = float ( sigma ) if N == 1 : nodes = array ( [ 0.0 ] ) transitions = array ( [ [ 1.0 ] ] ) return [ nodes , transitions ] p = ( rho + 1 ) / 2 q = p nu = sqrt ( ( N - 1 ) / ( 1 - rho ** 2 ) ) * sigma nodes = linspace ( - nu , nu , N ) sig_a = sigma n = 1 mat0 = array ( [ [ p , 1 - p ] , [ 1 - q , q ] ] ) if N == 2 : return [ nodes , mat0 ] for n in range ( 3 , N + 1 ) : mat = zeros ( ( n , n ) ) mat_A = mat . copy ( ) mat_B = mat . copy ( ) mat_C = mat . copy ( ) mat_D = mat . copy ( ) mat_A [ : - 1 , : - 1 ] = mat0 mat_B [ : - 1 , 1 : ] = mat0 mat_C [ 1 : , : - 1 ] = mat0 mat_D [ 1 : , 1 : ] = mat0 mat0 = p * mat_A + ( 1 - p ) * mat_B + ( 1 - q ) * mat_C + q * mat_D mat0 [ 1 : - 1 , : ] = mat0 [ 1 : - 1 , : ] / 2 P = mat0 return [ nodes , P ]
Approximate an AR1 process by a finite markov chain using Rouwenhorst s method .
43,894
def tensor_markov ( * args ) : if len ( args ) > 2 : m1 = args [ 0 ] m2 = args [ 1 ] tail = args [ 2 : ] prod = tensor_markov ( m1 , m2 ) return tensor_markov ( prod , tail ) elif len ( args ) == 2 : m1 , m2 = args n1 , t1 = m1 n2 , t2 = m2 n1 = np . array ( n1 , dtype = float ) n2 = np . array ( n2 , dtype = float ) t1 = np . array ( t1 , dtype = float ) t2 = np . array ( t2 , dtype = float ) assert ( n1 . shape [ 0 ] == t1 . shape [ 0 ] == t1 . shape [ 1 ] ) assert ( n2 . shape [ 0 ] == t2 . shape [ 0 ] == t2 . shape [ 1 ] ) t = np . kron ( t1 , t2 ) p = t1 . shape [ 0 ] q = t2 . shape [ 0 ] np . tile ( n2 , ( 1 , p ) ) n = np . column_stack ( [ np . repeat ( n1 , q , axis = 0 ) , np . tile ( n2 , ( p , 1 ) ) ] ) return [ n , t ] else : raise Exception ( "Incorrect number of arguments. Expected at least 2. Found {}." . format ( len ( args ) ) )
Computes the product of two independent markov chains .
43,895
def dynare_import ( filename , full_output = False , debug = False ) : import os basename = os . path . basename ( filename ) fname = re . compile ( '(.*)\.(.*)' ) . match ( basename ) . group ( 1 ) f = open ( filename ) txt = f . read ( ) model = parse_dynare_text ( txt , full_output = full_output , debug = debug ) model . name = fname return model
Imports model defined in specified file
43,896
def _shocks_to_epsilons ( model , shocks , T ) : n_e = len ( model . calibration [ 'exogenous' ] ) if isinstance ( shocks , pd . DataFrame ) : shocks = { k : shocks [ k ] . tolist ( ) for k in shocks . columns } if isinstance ( shocks , dict ) : epsilons = np . zeros ( ( T + 1 , n_e ) ) for ( i , k ) in enumerate ( model . symbols [ "exogenous" ] ) : if k in shocks : this_shock = shocks [ k ] epsilons [ : len ( this_shock ) , i ] = this_shock epsilons [ len ( this_shock ) : , i ] = this_shock [ - 1 ] else : epsilons [ : , i ] = model . calibration [ "exogenous" ] [ i ] return epsilons if shocks is None : shocks = model . calibration [ "exogenous" ] shocks = np . asarray ( shocks ) shocks = shocks . reshape ( ( - 1 , n_e ) ) epsilons = np . zeros ( ( T + 1 , n_e ) ) epsilons [ : ( shocks . shape [ 0 ] - 1 ) , : ] = shocks [ 1 : , : ] epsilons [ ( shocks . shape [ 0 ] - 1 ) : , : ] = shocks [ - 1 : , : ] return epsilons
Helper function to support input argument shocks being one of many different data types . Will always return a T n_e matrix .
43,897
def clear_all ( ) : frame = inspect . currentframe ( ) . f_back try : if frame . f_globals . get ( 'variables_order' ) : del frame . f_globals [ 'variables_order' ] if frame . f_globals . get ( 'parameters_order' ) : del frame . f_globals [ 'parameters_order' ] finally : del frame
Clears all parameters variables and shocks defined previously
43,898
def nonlinear_system ( model , initial_dr = None , maxit = 10 , tol = 1e-8 , grid = { } , distribution = { } , verbose = True ) : if verbose : headline = '|{0:^4} | {1:10} | {2:8} |' headline = headline . format ( 'N' , ' Error' , 'Time' ) stars = '-' * len ( headline ) print ( stars ) print ( headline ) print ( stars ) fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} |' f = model . functions [ 'arbitrage' ] g = model . functions [ 'transition' ] p = model . calibration [ 'parameters' ] distrib = model . get_distribution ( ** distribution ) nodes , weights = distrib . discretize ( ) approx = model . get_grid ( ** grid ) ms = create_interpolator ( approx , approx . interpolation ) grid = ms . grid if initial_dr is None : dr = approximate_controls ( model ) else : dr = initial_dr ms . set_values ( dr ( grid ) ) x = dr ( grid ) x0 = x . copy ( ) it = 0 err = 10 a0 = x0 . copy ( ) . reshape ( ( x0 . shape [ 0 ] * x0 . shape [ 1 ] , ) ) a = a0 . copy ( ) while err > tol and it < maxit : it += 1 t1 = time . time ( ) r , da = residuals ( f , g , grid , a . reshape ( x0 . shape ) , ms , nodes , weights , p , diff = True ) [ : 2 ] r = r . flatten ( ) err = abs ( r ) . max ( ) t2 = time . time ( ) if verbose : print ( fmt_str . format ( it , err , t2 - t1 ) ) if err > tol : a -= scipy . sparse . linalg . spsolve ( da , r ) if verbose : print ( stars ) return ms
Finds a global solution for model by solving one large system of equations using a simple newton algorithm .
43,899
def gauss_hermite_nodes ( orders , sigma , mu = None ) : if isinstance ( orders , int ) : orders = [ orders ] import numpy if mu is None : mu = numpy . array ( [ 0 ] * sigma . shape [ 0 ] ) herms = [ hermgauss ( i ) for i in orders ] points = [ h [ 0 ] * numpy . sqrt ( 2 ) for h in herms ] weights = [ h [ 1 ] / numpy . sqrt ( numpy . pi ) for h in herms ] if len ( orders ) == 1 : x = numpy . array ( points [ 0 ] ) * numpy . sqrt ( float ( sigma ) ) if sigma . ndim == 2 : x = x [ : , None ] w = weights [ 0 ] return [ x , w ] else : x = cartesian ( points ) . T from functools import reduce w = reduce ( numpy . kron , weights ) zero_columns = numpy . where ( sigma . sum ( axis = 0 ) == 0 ) [ 0 ] for i in zero_columns : sigma [ i , i ] = 1.0 C = numpy . linalg . cholesky ( sigma ) x = numpy . dot ( C , x ) + mu [ : , numpy . newaxis ] x = numpy . ascontiguousarray ( x . T ) for i in zero_columns : x [ : , i ] = 0 return [ x , w ]
Computes the weights and nodes for Gauss Hermite quadrature .