idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
231,700
def visit_ExceptHandler(self, node):
    """Chain each handler statement into the CFG and collect raise edges."""
    currs = (node,)
    raises = ()
    for stmt in node.body:
        self.result.add_node(stmt)
        # every current predecessor flows into this statement
        for curr in currs:
            self.result.add_edge(curr, stmt)
        currs, new_raises = self.visit(stmt)
        raises += new_raises
    return currs, raises
OUT = body s RAISES = body s
82
10
231,701
def visit_Name(self, node):
    """Mark a variable as (re)defined when its name appears in store context."""
    if isinstance(node.ctx, ast.Store):
        self.result[node.id] = True
Stored variable have new value .
34
7
231,702
def add(self, variable, range_):
    """Register a range for a variable, merging with any previous range."""
    if variable in self.result:
        self.result[variable] = self.result[variable].union(range_)
    else:
        self.result[variable] = range_
    return self.result[variable]
Add a new low and high bound for a variable .
56
11
231,703
def visit_Assign(self, node):
    """Propagate the assigned expression's range to every target name."""
    assigned_range = self.visit(node.value)
    for target in node.targets:
        if isinstance(target, ast.Name):
            # Make sure all Interval doesn't alias for multiple variables.
            self.add(target.id, assigned_range)
        else:
            self.visit(target)
Set range value for assigned variable .
74
7
231,704
def visit_AugAssign(self, node):
    """Update the range of a variable modified by an augmented assignment."""
    self.generic_visit(node)
    if isinstance(node.target, ast.Name):
        name = node.target.id
        # combine old range with the operand's range, then keep both outcomes
        updated = combine(node.op, self.result[name], self.result[node.value])
        self.result[name] = self.result[name].union(updated)
Update range value for augassigned variables .
83
9
231,705
def visit_For(self, node):
    """Bind the iteration variable's range when iterating an intrinsic call."""
    assert isinstance(node.target, ast.Name), "For apply on variables."
    self.visit(node.iter)
    if isinstance(node.iter, ast.Call):
        for alias in self.aliases[node.iter.func]:
            if isinstance(alias, Intrinsic):
                arg_ranges = [self.visit(n) for n in node.iter.args]
                self.add(node.target.id,
                         alias.return_range_content(arg_ranges))
    self.visit_loop(node)
Handle iterate variable in for loops .
120
8
231,706
def visit_loop(self, node, cond=None):
    """Reach a fixpoint on the ranges of variables updated in a loop body."""
    # visit once to gather newly declared vars
    for stmt in node.body:
        self.visit(stmt)
    # freeze current state
    old_range = self.result.copy()
    # extra round
    for stmt in node.body:
        self.visit(stmt)
    # widen any change
    for expr, range_ in old_range.items():
        self.result[expr] = self.result[expr].widen(range_)
    # propagate the new informations
    if cond:
        self.visit(cond)
    for stmt in node.body:
        self.visit(stmt)
    for stmt in node.orelse:
        self.visit(stmt)
Handle incremented variables in loop body .
154
8
231,707
def visit_BoolOp(self, node):
    """A boolean expression may evaluate to any of its operands' values."""
    bounds = list(zip(*[self.visit(operand).bounds()
                        for operand in node.values]))
    return self.add(node, Interval(min(bounds[0]), max(bounds[1])))
Merge right and left operands ranges .
68
9
231,708
def visit_BinOp(self, node):
    """Combine the ranges of both operands according to the operator."""
    combined = combine(node.op,
                       self.visit(node.left),
                       self.visit(node.right))
    return self.add(node, combined)
Combine operands ranges for given operator .
47
9
231,709
def visit_UnaryOp(self, node):
    """Compute the resulting range of a unary operation."""
    operand_range = self.visit(node.operand)
    if isinstance(node.op, ast.Not):
        # `not x` is always a boolean
        operand_range = Interval(0, 1)
    elif (isinstance(node.op, ast.Invert) and
          isinstance(operand_range.high, int) and
          isinstance(operand_range.low, int)):
        # bitwise complement swaps and complements the bounds
        operand_range = Interval(~operand_range.high, ~operand_range.low)
    elif isinstance(node.op, ast.UAdd):
        pass
    elif isinstance(node.op, ast.USub):
        operand_range = Interval(-operand_range.high, -operand_range.low)
    else:
        operand_range = UNKNOWN_RANGE
    return self.add(node, operand_range)
Update range with given unary operation .
161
8
231,710
def visit_If(self, node):
    """Evaluate both branches independently, then merge their ranges."""
    self.visit(node.test)
    old_range = self.result
    # ranges after the true branch
    self.result = old_range.copy()
    for stmt in node.body:
        self.visit(stmt)
    body_range = self.result
    # ranges after the false branch
    self.result = old_range.copy()
    for stmt in node.orelse:
        self.visit(stmt)
    orelse_range = self.result
    # union both outcomes, keeping variables only seen in one branch
    self.result = body_range
    for key, rng in orelse_range.items():
        if key in self.result:
            self.result[key] = self.result[key].union(rng)
        else:
            self.result[key] = rng
Handle iterate variable across branches
152
6
231,711
def visit_IfExp(self, node):
    """A conditional expression may yield either branch: take the union."""
    self.visit(node.test)
    body_range = self.visit(node.body)
    orelse_range = self.visit(node.orelse)
    return self.add(node, orelse_range.union(body_range))
Use worst case for both possible values .
67
8
231,712
def visit_Compare(self, node):
    """Evaluate a comparison by folding each operator over interval operands."""
    # membership / identity tests always yield a boolean
    if any(isinstance(op, (ast.In, ast.NotIn, ast.Is, ast.IsNot))
           for op in node.ops):
        self.generic_visit(node)
        return self.add(node, Interval(0, 1))
    curr = self.visit(node.left)
    res = []
    # NOTE(review): chained comparisons compare the *left* operand to every
    # comparator (a < b < c is treated as a < b and a < c) -- confirm intent
    for op, comparator in zip(node.ops, node.comparators):
        comparator = self.visit(comparator)
        # build a tiny `x <op> y` expression and evaluate it on the intervals
        fake = ast.Compare(ast.Name('x', ast.Load(), None),
                           [op],
                           [ast.Name('y', ast.Load(), None)])
        fake = ast.Expression(fake)
        ast.fix_missing_locations(fake)
        expr = compile(ast.gast_to_ast(fake), '<range_values>', 'eval')
        res.append(eval(expr, {'x': curr, 'y': comparator}))
    if all(res):
        return self.add(node, Interval(1, 1))
    elif any(r.low == r.high == 0 for r in res):
        return self.add(node, Interval(0, 0))
    else:
        return self.add(node, Interval(0, 1))
Boolean are possible index .
302
6
231,713
def visit_Call(self, node):
    """Compute a call's return range from the aliases of its callee."""
    for alias in self.aliases[node.func]:
        if alias is MODULES['__builtin__']['getattr']:
            # getattr(obj, "attr"): use the attribute's own return range
            attr_name = node.args[-1].s
            attribute = attributes[attr_name][-1]
            self.add(node, attribute.return_range(None))
        elif isinstance(alias, Intrinsic):
            arg_ranges = [self.visit(n) for n in node.args]
            self.add(node, alias.return_range(arg_ranges))
        else:
            # unknown callee: fall back to the generic handling
            return self.generic_visit(node)
    return self.result[node]
Function calls are not handled for now .
152
8
231,714
def visit_Num(self, node):
    """An integer literal has an exact single-point range."""
    if isinstance(node.n, int):
        return self.add(node, Interval(node.n, node.n))
    # non-integer literals are not tracked
    return UNKNOWN_RANGE
Handle literals integers values .
46
6
231,715
def visit_Name(self, node):
    """Look up the currently known range of a variable."""
    return self.add(node, self.result[node.id])
Get range for parameters for examples or false branching .
26
10
231,716
def generic_visit(self, node):
    """Fallback: visit children and give the node an unknown range."""
    super(RangeValues, self).generic_visit(node)
    return self.add(node, UNKNOWN_RANGE)
Other nodes are not known and range value neither .
39
10
231,717
def compile_flags(args):
    """Build a compiler-options dict (cppflags / ldflags / cxxflags entries).

    `args` is a parsed command-line namespace; its flag lists are mapped to
    distutils-style option keys.
    """
    compiler_options = {
        'define_macros': args.defines,
        'undef_macros': args.undefs,
        'include_dirs': args.include_dirs,
        'extra_compile_args': args.extra_flags,
        'library_dirs': args.libraries_dir,
        'extra_link_args': args.extra_flags,
    }
    # forward optional parameters only when they are actually set
    for param in ('opts',):
        val = getattr(args, param, None)
        if val:
            compiler_options[param] = val
    return compiler_options
Build a dictionary with an entry for cppflags ldflags and cxxflags .
132
20
231,718
def find_matching_builtin(self, node):
    """Return the equivalent-iterator path whose builtin aliases `node.func`."""
    for path in EQUIVALENT_ITERATORS:
        # the callee must alias exactly this one builtin
        if self.aliases[node.func] == {path_to_node(path)}:
            return path
Return matched keyword .
61
4
231,719
def visit_Module(self, node):
    """Prepend an itertools import when an iterator replacement needs it."""
    self.generic_visit(node)
    import_alias = ast.alias(name='itertools', asname=mangle('itertools'))
    if self.use_itertools:
        node.body.insert(0, ast.Import(names=[import_alias]))
    return node
Add itertools import for imap izip or ifilter iterator .
85
17
231,720
def visit_Call(self, node):
    """Replace a call by its iterator counterpart when that is safe."""
    if node in self.potential_iterator:
        matched_path = self.find_matching_builtin(node)
        if matched_path is None:
            return self.generic_visit(node)
        # Special handling for map which can't be turn to imap with None as
        # a parameter as map(None, [1, 2]) == [1, 2] while
        # list(imap(None, [1, 2])) == [(1,), (2,)]
        if (matched_path[1] == "map" and
                MODULES["__builtin__"]["None"] in self.aliases[node.args[0]]):
            return self.generic_visit(node)
        # if a dtype conversion is implied
        if matched_path[1] in ('array', 'asarray') and len(node.args) != 1:
            return self.generic_visit(node)
        path = EQUIVALENT_ITERATORS[matched_path]
        if path:
            node.func = path_to_attr(path)
            self.use_itertools |= path[0] == 'itertools'
        else:
            # no iterator equivalent: the call is the identity, keep its arg
            node = node.args[0]
        self.update = True
    return self.generic_visit(node)
Replace function call by its correct iterator if it is possible .
285
13
231,721
def run(xmin, ymin, xmax, ymax, step, range_, range_x, range_y, t):
    """Accumulate weighted inverse-distance contributions on a lat/lon grid.

    Each row of `t` is (lat, lon, weight); cells within `range_` km of a
    point receive weight / (1 + distance).
    """
    X, Y = t.shape
    pt = np.zeros((X, Y))
    for i in range(X):
        for j in range(Y):
            for k in t:
                # great-circle distance (spherical law of cosines, km)
                dist = 6368. * np.arccos(
                    np.cos(xmin + step * i) * np.cos(k[0]) *
                    np.cos((ymin + step * j) - k[1]) +
                    np.sin(xmin + step * i) * np.sin(k[0]))
                if dist < range_:
                    pt[i][j] += k[2] / (1 + dist)
    return pt
omp parallel for
173
3
231,722
def max_values(args):
    """Range of max(): from the greatest low bound to the greatest high bound."""
    return Interval(max(x.low for x in args),
                    max(x.high for x in args))
Return possible range for max function .
34
7
231,723
def min_values(args):
    """Range of min(): from the smallest low bound to the smallest high bound."""
    return Interval(min(x.low for x in args),
                    min(x.high for x in args))
Return possible range for min function .
34
7
231,724
def union(self, other):
    """Return the smallest interval containing both `self` and `other`."""
    return Interval(min(self.low, other.low),
                    max(self.high, other.high))
Union current range with other .
34
7
231,725
def widen(self, other):
    """Relax any moving bound to infinity so fixpoint iteration terminates."""
    low = self.low if self.low >= other.low else -float("inf")
    high = self.high if self.high <= other.high else float("inf")
    return Interval(low, high)
Widen current range .
65
5
231,726
def handle_keywords(self, func, node, offset=0):
    """Turn keyword arguments of a call into positional replacements.

    Returns a dict mapping positional index to a copy of the value coming
    either from a keyword argument or from the callee's default values.
    """
    func_argument_names = {}
    for i, arg in enumerate(func.args.args[offset:]):
        assert isinstance(arg, ast.Name)
        func_argument_names[arg.id] = i
    nargs = len(func.args.args) - offset
    defaults = func.args.defaults
    keywords = {func_argument_names[kw.arg]: kw.value
                for kw in node.keywords}
    # pad the positional argument list up to the last keyword position
    node.args.extend([None] * (1 + max(keywords.keys()) - len(node.args)))
    replacements = {}
    for index, arg in enumerate(node.args):
        if arg is None:
            if index in keywords:
                replacements[index] = deepcopy(keywords[index])
            else:  # must be a default value
                replacements[index] = deepcopy(defaults[index - nargs])
    return replacements
Gather keywords to positional argument information
209
7
231,727
def update_effects(self, node):
    """Combine the first argument of a call with each remaining argument."""
    first = node.args[0]
    return [self.combine(first, other, register=True, aliasing_type=True)
            for other in node.args[1:]]
Combiner when we update the first argument of a function .
55
12
231,728
def save_method(elements, module_path):
    """Recursively record methods with their module path and signature."""
    for name, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_method(signature, module_path + (name,))
        elif isinstance(signature, Class):
            save_method(signature.fields, module_path + (name,))
        elif signature.ismethod():
            # in case of duplicates, there must be a __dispatch__ record
            # and it is the only recorded one
            if name in methods and module_path[0] != '__dispatch__':
                assert name in MODULES['__dispatch__']
                path = ('__dispatch__',)
                methods[name] = (path, MODULES['__dispatch__'][name])
            else:
                methods[name] = (module_path, signature)
Recursively save methods with module name and signature .
200
11
231,729
def save_function(elements, module_path):
    """Recursively record free functions with their module path and signature."""
    for name, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_function(signature, module_path + (name,))
        elif signature.isstaticfunction():
            functions.setdefault(name, []).append((module_path, signature,))
        elif isinstance(signature, Class):
            save_function(signature.fields, module_path + (name,))
Recursively save functions with module name and signature .
114
11
231,730
def save_attribute(elements, module_path):
    """Recursively record attributes with their module path and signature."""
    for name, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_attribute(signature, module_path + (name,))
        elif signature.isattribute():
            assert name not in attributes  # we need unicity
            attributes[name] = (module_path, signature,)
        elif isinstance(signature, Class):
            save_attribute(signature.fields, module_path + (name,))
Recursively save attributes with module name and signature .
115
11
231,731
def visit_Assign(self, node):
    """Swap a list construction for a static_list one when the size is fixed."""
    self.generic_visit(node)
    if node.value in self.fixed_size_list:
        node.value = self.convert(node.value)
    return node
Replace list calls by static_list calls when possible
50
11
231,732
def copy_pkg(self, pkg, src_only=False):
    """Copy a third_party dependency into the build and source trees."""
    if getattr(self, 'no_' + pkg) is None:
        print('Copying boost dependencies')
        to_copy = pkg,
    else:
        # dependency explicitly disabled: nothing to do
        return
    src = os.path.join('third_party', *to_copy)
    # copy to the build tree
    if not src_only:
        target = os.path.join(self.build_lib, 'pythran', *to_copy)
        shutil.rmtree(target, True)
        shutil.copytree(src, target)
    # copy them to the source tree too, needed for sdist
    target = os.path.join('pythran', *to_copy)
    shutil.rmtree(target, True)
    shutil.copytree(src, target)
Install boost deps from the third_party directory
183
10
231,733
def check_list(self, node_list, pattern_list):
    """Check that two node lists match element-wise."""
    if len(node_list) != len(pattern_list):
        return False
    return all(Check(node_elt, self.placeholders).visit(pattern_list[i])
               for i, node_elt in enumerate(node_list))
Check if list of node are equal .
74
8
231,734
def visit_Placeholder(self, pattern):
    """Bind the placeholder to the node, or compare with its previous binding."""
    if pattern.id in self.placeholders:
        bound = self.placeholders[pattern.id]
        # an already-bound placeholder must match the same node again
        if not Check(self.node, self.placeholders).visit(bound):
            return False
    self.placeholders[pattern.id] = self.node
    return True
Save matching node or compare it with the existing one .
68
11
231,735
def visit_AST_or(self, pattern):
    """Match if any alternative of the AST_or pattern matches the node."""
    return any(self.field_match(self.node, candidate)
               for candidate in pattern.args)
Match if any of the or content match with the other node .
38
13
231,736
def visit_Set(self, pattern):
    """Match set patterns regardless of element order."""
    if len(pattern.elts) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Set is too long")
    # try every ordering of the pattern elements
    return (isinstance(self.node, Set) and
            any(self.check_list(self.node.elts, pattern_elts)
                for pattern_elts in permutations(pattern.elts)))
Set have unordered values .
92
6
231,737
def visit_Dict(self, pattern):
    """Match dict patterns regardless of key order."""
    if not isinstance(self.node, Dict):
        return False
    if len(pattern.keys) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Dict is too long")
    for permutation in permutations(range(len(self.node.keys))):
        for i, value in enumerate(permutation):
            if not self.field_match(self.node.keys[i], pattern.keys[value]):
                break
        else:
            # all keys matched under this ordering: now compare the values
            pattern_values = [pattern.values[i] for i in permutation]
            return self.check_list(self.node.values, pattern_values)
    return False
Dict can match with unordered values .
156
9
231,738
def field_match(self, node_field, pattern_field):
    """Check whether a single AST field matches the pattern's field."""
    # all three checks are evaluated eagerly, as in a plain disjunction
    is_good_list = (isinstance(pattern_field, list) and
                    self.check_list(node_field, pattern_field))
    is_good_node = (isinstance(pattern_field, AST) and
                    Check(node_field, self.placeholders).visit(pattern_field))

    def strict_eq(f0, f1):
        # NaN != NaN, but two NaN values should still match each other
        try:
            return f0 == f1 or (isnan(f0) and isnan(f1))
        except TypeError:
            return f0 == f1

    is_same = strict_eq(pattern_field, node_field)
    return is_good_list or is_good_node or is_same
Check if two fields match .
162
6
231,739
def generic_visit(self, pattern):
    """Default match: same node type and every field matching."""
    return (isinstance(pattern, type(self.node)) and
            all(self.field_match(value, getattr(pattern, field))
                for field, value in iter_fields(self.node)))
Check if the pattern match with the checked node .
59
10
231,740
def visit(self, node):
    """Collect every node matching the pattern while walking the tree."""
    if Check(node, dict()).visit(self.pattern):
        self.result.add(node)
    self.generic_visit(node)
Visitor looking for matching between current node and pattern .
42
11
231,741
def visit_Call(self, node):
    """Track laziness of the variables involved in a function call."""
    md.visit(self, node)
    for arg in node.args:
        self.visit(arg)
    self.func_args_lazyness(node.func, node.args, node)
    self.visit(node.func)
Compute use of variables in a function call .
59
10
231,742
def n_queens(queen_count):
    """N-Queens solver: return every solution as a tuple of column indices."""
    solutions = list()
    cols = range(queen_count)
    for vec in permutations(cols):
        # a permutation places one queen per row/column; it is a solution
        # when both diagonal directions are also collision-free
        if (queen_count == len(set(vec[i] + i for i in cols))
                == len(set(vec[i] - i for i in cols))):
            solutions.append(vec)
    return solutions
N - Queens solver .
102
6
231,743
def visit_Stmt(self, node):
    """Visit a statement, hoisting freshly created definitions before it."""
    save_defs, self.defs = self.defs or list(), list()
    self.generic_visit(node)
    new_defs, self.defs = self.defs, save_defs
    return new_defs + [node]
Add new variable definition before the Statement .
70
8
231,744
def visit_Call(self, node):
    """Replace a call to an inlinable function by its inlined body."""
    func_aliases = self.aliases[node.func]
    if len(func_aliases) == 1:
        function_def = next(iter(func_aliases))
        if (isinstance(function_def, ast.FunctionDef) and
                function_def.name in self.inlinable):
            self.update = True
            to_inline = copy.deepcopy(self.inlinable[function_def.name])
            arg_to_value = dict()
            values = node.args
            # complete missing positional arguments with the defaults
            values += to_inline.args.defaults[
                len(node.args) - len(to_inline.args.args):]
            for arg_fun, arg_call in zip(to_inline.args.args, values):
                # bind each argument to a fresh, uniquely named variable
                v_name = "__pythran_inline{}{}{}".format(
                    function_def.name, arg_fun.id, self.call_count)
                new_var = ast.Name(id=v_name, ctx=ast.Store(),
                                   annotation=None)
                self.defs.append(ast.Assign(targets=[new_var],
                                            value=arg_call))
                arg_to_value[arg_fun.id] = ast.Name(id=v_name,
                                                    ctx=ast.Load(),
                                                    annotation=None)
            self.call_count += 1
            return Inliner(arg_to_value).visit(to_inline.body[0])
    return node
Replace function call by inlined function s body .
325
11
231,745
def size_container_folding(value):
    """Convert a container value into an AST literal, if it is small enough."""
    if len(value) >= MAX_LEN:
        raise ToNotEval()
    if isinstance(value, list):
        return ast.List([to_ast(elt) for elt in value], ast.Load())
    elif isinstance(value, tuple):
        return ast.Tuple([to_ast(elt) for elt in value], ast.Load())
    elif isinstance(value, set):
        return ast.Set([to_ast(elt) for elt in value])
    elif isinstance(value, dict):
        keys = [to_ast(elt) for elt in value.keys()]
        values = [to_ast(elt) for elt in value.values()]
        return ast.Dict(keys, values)
    elif isinstance(value, np.ndarray):
        # rebuild the array through an explicit numpy.array(..., dtype) call
        return ast.Call(
            func=ast.Attribute(
                ast.Name(mangle('numpy'), ast.Load(), None),
                'array',
                ast.Load()),
            args=[to_ast(totuple(value.tolist())),
                  ast.Attribute(
                      ast.Name(mangle('numpy'), ast.Load(), None),
                      value.dtype.name,
                      ast.Load())],
            keywords=[])
    else:
        raise ConversionError()
Convert value to ast expression if size is not too big .
325
13
231,746
def builtin_folding(value):
    """Convert a builtin value or function into an AST attribute access."""
    if isinstance(value, (type(None), bool)):
        name = str(value)
    elif value.__name__ in ("bool", "float", "int"):
        # these builtins are exported with a trailing underscore
        name = value.__name__ + "_"
    else:
        name = value.__name__
    return ast.Attribute(ast.Name('__builtin__', ast.Load(), None),
                         name, ast.Load())
Convert builtin function to ast expression .
107
9
231,747
def to_ast(value):
    """Turn a constant value into the equivalent AST expression."""
    if isinstance(value, (type(None), bool)):
        return builtin_folding(value)
    if sys.version_info[0] == 2 and isinstance(value, long):
        from pythran.syntax import PythranSyntaxError
        raise PythranSyntaxError("constant folding results in big int")
    if any(value is t for t in (bool, int, float)):
        iinfo = np.iinfo(int)
        if isinstance(value, int) and not (iinfo.min <= value <= iinfo.max):
            from pythran.syntax import PythranSyntaxError
            raise PythranSyntaxError("constant folding results in big int")
        return builtin_folding(value)
    elif isinstance(value, np.generic):
        return to_ast(np.asscalar(value))
    elif isinstance(value, numbers.Number):
        return ast.Num(value)
    elif isinstance(value, str):
        return ast.Str(value)
    elif isinstance(value, (list, tuple, set, dict, np.ndarray)):
        return size_container_folding(value)
    elif hasattr(value, "__module__") and value.__module__ == "__builtin__":
        # TODO Can be done the same way for others modules
        return builtin_folding(value)
    # only meaningful for python3
    elif sys.version_info.major == 3:
        if isinstance(value, (filter, map, zip)):
            return to_ast(list(value))
    raise ToNotEval()
Turn a value into ast expression .
366
7
231,748
def visit_Module(self, node):
    """Record each module-level definition as a new variable name."""
    duc = SilentDefUseChains()
    duc.visit(node)
    for definition in duc.locals[node]:
        self.result[definition.name()] = definition.node
Import module define a new variable name .
52
8
231,749
def attr_to_path(node):
    """Compute the full pythonic path and final object for an attribute node."""
    def get_intrinsic_path(modules, attr):
        """ Get function path and intrinsic from an ast.Attribute. """
        if isinstance(attr, ast.Name):
            return modules[demangle(attr.id)], (demangle(attr.id),)
        elif isinstance(attr, ast.Attribute):
            module, path = get_intrinsic_path(modules, attr.value)
            return module[attr.attr], path + (attr.attr,)

    obj, path = get_intrinsic_path(MODULES, node)
    # non-literal objects live in a `functor` sub-namespace
    if not obj.isliteral():
        path = path[:-1] + ('functor', path[-1])
    return obj, ('pythonic',) + path
Compute path and final object for an attribute node
192
10
231,750
def path_to_attr(path):
    """Transform a module path tuple into a nested ast.Attribute."""
    attribute = ast.Name(mangle(path[0]), ast.Load(), None)
    for component in path[1:]:
        attribute = ast.Attribute(attribute, component, ast.Load())
    return attribute
Transform path to ast . Attribute .
64
8
231,751
def get_variable(assignable):
    """Peel subscripts and attribute-getters to reach the underlying name."""
    msg = "Only name and subscript can be assigned."
    assert isinstance(assignable, (ast.Name, ast.Subscript)), msg
    while isinstance(assignable, ast.Subscript) or isattr(assignable):
        if isattr(assignable):
            assignable = assignable.args[0]
        else:
            assignable = assignable.value
    return assignable
Return modified variable name .
90
5
231,752
def prepare(self, node):
    """Format type dependency information to use it for reordering."""
    super(Reorder, self).prepare(node)
    candidates = self.type_dependencies.successors(TypeDependencies.NoDeps)
    # We first select function which may have a result without calling any
    # others functions.
    # Then we check if no loops type dependencies exists. If it exists, we
    # can safely remove the dependency as it could be compute without this
    # information.
    # As we can compute type for this function, successors can potentially
    # be computed
    # FIXME: This is false in some cases
    #
    # def bar(i):
    #     if i > 0:
    #         return foo(i)
    #     else:
    #         return []
    #
    # def foo(i):
    #     return [len(bar(i-1)) + len(bar(i - 2))]
    #
    # If we check for function without deps first, we will pick bar and say
    # it returns empty list
    while candidates:
        new_candidates = list()
        for n in candidates:
            # remove edges that imply a circular dependency
            for p in list(self.type_dependencies.predecessors(n)):
                if nx.has_path(self.type_dependencies, n, p):
                    self.type_dependencies.remove_edge(p, n)
            if n not in self.type_dependencies.successors(n):
                new_candidates.extend(self.type_dependencies.successors(n))
        candidates = new_candidates
Format type dependencies information to use if for reordering .
309
11
231,753
def visit_Module(self, node):
    """Keep non-function statements, then append topologically sorted functions."""
    newbody = list()
    olddef = list()
    for stmt in node.body:
        if isinstance(stmt, ast.FunctionDef):
            olddef.append(stmt)
        else:
            newbody.append(stmt)
    try:
        newdef = topological_sort(self.type_dependencies,
                                  self.ordered_global_declarations)
        newdef = [f for f in newdef if isinstance(f, ast.FunctionDef)]
    except nx.exception.NetworkXUnfeasible:
        raise PythranSyntaxError("Infinite function recursion")
    assert set(newdef) == set(olddef), "A function have been lost..."
    node.body = newbody + newdef
    self.update = True
    return node
Keep everything but function definition then add sorted functions .
173
10
231,754
def visit(self, node):
    """Preserve OpenMP directives attached to a node across its rewriting."""
    old_omp = metadata.get(node, OMPDirective)
    node = super(DeadCodeElimination, self).visit(node)
    # re-attach the directives unless the new node already carries some
    if not metadata.get(node, OMPDirective):
        for omp_directive in old_omp:
            metadata.add(node, omp_directive)
    return node
Add OMPDirective from the old node to the new one .
79
14
231,755
def save_intrinsic_alias(module):
    """Recursively give each pythonic intrinsic a default singleton alias set."""
    for entry in module.values():
        if isinstance(entry, dict):  # Submodules case
            save_intrinsic_alias(entry)
        else:
            IntrinsicAliases[entry] = frozenset((entry,))
            if isinstance(entry, Class):
                save_intrinsic_alias(entry.fields)
Recursively save default aliases for pythonic functions .
89
11
231,756
def visit_IfExp(self, node):
    """The expression may alias to either of its branches."""
    self.visit(node.test)
    branch_aliases = [self.visit(n) for n in (node.body, node.orelse)]
    return self.add(node, set.union(*branch_aliases))
Resulting node alias to either branch
58
7
231,757
def visit_Dict(self, node):
    """A dict is abstracted as an unordered container of its values."""
    if node.keys:
        elts_aliases = set()
        for key, val in zip(node.keys, node.values):
            self.visit(key)  # res ignored, just to fill self.aliases
            elts_aliases.update(ContainerOf(alias)
                                for alias in self.visit(val))
    else:
        # empty dict: nothing to alias to
        elts_aliases = None
    return self.add(node, elts_aliases)
A dict is abstracted as an unordered container of its values
112
13
231,758
def visit_Set(self, node):
    """A set is abstracted as an unordered container of its elements."""
    if node.elts:
        elts_aliases = set()
        for elt in node.elts:
            for alias in self.visit(elt):
                elts_aliases.add(ContainerOf(alias))
    else:
        elts_aliases = None
    return self.add(node, elts_aliases)
A set is abstracted as an unordered container of its elements
69
13
231,759
def visit_Return(self, node):
    """Accumulate the aliases of every returned value under RetId."""
    if not node.value:
        return
    ret_aliases = self.visit(node.value)
    # merge with aliases gathered from previously seen return statements
    if Aliases.RetId in self.aliases:
        ret_aliases = ret_aliases.union(self.aliases[Aliases.RetId])
    self.aliases[Aliases.RetId] = ret_aliases
A side effect of computing aliases on a Return is that it updates the return_alias field of current function
79
21
231,760
def visit_Subscript(self, node):
    """Alias a subscript to the container's content when it is known."""
    if isinstance(node.slice, ast.Index):
        aliases = set()
        self.visit(node.slice)
        value_aliases = self.visit(node.value)
        for alias in value_aliases:
            if isinstance(alias, ContainerOf):
                if isinstance(node.slice.value, ast.Slice):
                    continue
                if isinstance(node.slice.value, ast.Num):
                    if node.slice.value.n != alias.index:
                        continue
                # FIXME: what if the index is a slice variable...
                aliases.add(alias.containee)
            elif isinstance(getattr(alias, 'ctx', None), ast.Param):
                aliases.add(ast.Subscript(alias, node.slice, node.ctx))
    else:
        # could be enhanced through better handling of containers
        aliases = None
    self.generic_visit(node)
    return self.add(node, aliases)
Resulting node alias stores the subscript relationship if we don t know anything about the subscripted node .
211
20
231,761
def visit_Tuple(self, node):
    """A tuple is abstracted as an ordered container of its values."""
    if node.elts:
        elts_aliases = set()
        for index, elt in enumerate(node.elts):
            # each element's aliases are tagged with its position
            elts_aliases.update(ContainerOf(alias, index)
                                for alias in self.visit(elt))
    else:
        elts_aliases = None
    return self.add(node, elts_aliases)
A tuple is abstracted as an ordered container of its values
100
12
231,762
def visit_ListComp(self, node):
    """A comprehension is not abstracted in any way."""
    for generator in node.generators:
        self.visit_comprehension(generator)
    self.visit(node.elt)
    return self.add(node)
A comprehension is not abstracted in any way
45
9
231,763
def visit_FunctionDef(self, node):
    """Initialise aliasing state, then compute the function's return aliases."""
    self.aliases = IntrinsicAliases.copy()
    self.aliases.update((f.name, {f})
                        for f in self.global_declarations.values())
    self.aliases.update((arg.id, {arg})
                        for arg in node.args.args)
    self.generic_visit(node)
    if Aliases.RetId in self.aliases:
        # parametrize the expression
        def parametrize(exp):
            # constant(?) or global -> no change
            if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)):
                return lambda _: {exp}
            elif isinstance(exp, ContainerOf):
                pcontainee = parametrize(exp.containee)
                index = exp.index
                return lambda args: {ContainerOf(pc, index)
                                     for pc in pcontainee(args)}
            elif isinstance(exp, ast.Name):
                try:
                    w = node.args.args.index(exp)

                    def return_alias(args):
                        if w < len(args):
                            return {args[w]}
                        else:
                            return {node.args.defaults[w - len(args)]}
                    return return_alias
                except ValueError:
                    return lambda _: self.get_unbound_value_set()
            elif isinstance(exp, ast.Subscript):
                values = parametrize(exp.value)
                slices = parametrize(exp.slice)
                return lambda args: {ast.Subscript(value, slice, ast.Load())
                                     for value in values(args)
                                     for slice in slices(args)}
            else:
                return lambda _: self.get_unbound_value_set()

        # this is a little tricky: for each returned alias,
        # parametrize builds a function that, given a list of args,
        # returns the alias
        # then as we may have multiple returned alias, we compute the union
        # of these returned aliases
        return_aliases = [parametrize(ret_alias)
                          for ret_alias in self.aliases[Aliases.RetId]]

        def merge_return_aliases(args):
            merged_return_aliases = set()
            for return_alias in return_aliases:
                merged_return_aliases.update(return_alias(args))
            return merged_return_aliases

        node.return_alias = merge_return_aliases
Initialise aliasing default value before visiting .
534
9
231,764
def visit_For(self, node):
    """A for loop aliases its target to the content of the iterator."""
    iter_aliases = self.visit(node.iter)
    if all(isinstance(x, ContainerOf) for x in iter_aliases):
        # the iterable's contents are known: alias the target to them
        target_aliases = {iter_alias.containee
                          for iter_alias in iter_aliases}
    else:
        target_aliases = {node.target}
    self.add(node.target, target_aliases)
    self.aliases[node.target.id] = self.result[node.target]
    self.generic_visit(node)
    # NOTE(review): the body is deliberately visited a second time,
    # presumably to reach a fixed point on aliases created inside the
    # loop -- confirm the intent before changing
    self.generic_visit(node)
For loop creates aliasing between the target and the content of the iterator
137
14
231,765
def prepare(self, node):
    """Initialise argument effects, as this analysis is inter-procedural."""
    super(ArgumentReadOnce, self).prepare(node)
    # global functions init
    for n in self.global_declarations.values():
        fe = ArgumentReadOnce.FunctionEffects(n)
        self.node_to_functioneffect[n] = fe
        self.result.add(fe)

    # Pythonic functions init
    def save_effect(module):
        """ Recursively save read once effect for Pythonic functions. """
        for intr in module.values():
            if isinstance(intr, dict):  # Submodule case
                save_effect(intr)
            else:
                fe = ArgumentReadOnce.FunctionEffects(intr)
                self.node_to_functioneffect[intr] = fe
                self.result.add(fe)
                if isinstance(intr, intrinsic.Class):  # Class case
                    save_effect(intr.fields)

    for module in MODULES.values():
        save_effect(module)
Initialise arguments effects as this analysis in inter - procedural .
199
12
231,766
def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Convert a list of `~regions.Region` to a DS9 region string."""
    shapelist = to_shape_list(regions, coordsys)
    return shapelist.to_ds9(coordsys, fmt, radunit)
Converts a list of ~regions . Region to DS9 region string .
73
16
231,767
def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Serialize *regions* to a DS9 string and write it to *filename*."""
    serialized = ds9_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as fh:
        fh.write(serialized)
Converts a list of ~regions . Region to DS9 string and write to file .
78
19
231,768
def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Convert a list of `~regions.Region` objects to a CRTF region string.

    Mirrors :func:`ds9_objects_to_string` but targets the CRTF format.
    """
    return to_shape_list(regions, coordsys).to_crtf(coordsys, fmt, radunit)
Converts a list of ~regions . Region to CRTF region string .
72
16
231,769
def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
    """Serialize *regions* to a CRTF string and write it to *filename*."""
    serialized = crtf_objects_to_string(regions, coordsys, fmt, radunit)
    with open(filename, 'w') as fh:
        fh.write(serialized)
Converts a list of ~regions . Region to CRTF string and write to file .
77
19
231,770
def corners(self):
    """Return the four (x, y) corner pairs of the rotated rectangle.

    Corners are computed around the origin, rotated by ``self.angle``,
    then translated to ``self.center``.
    """
    half_w = self.width / 2
    half_h = self.height / 2
    base = [
        (-half_w, -half_h),
        (half_w, -half_h),
        (half_w, half_h),
        (-half_w, half_h),
    ]
    c = np.cos(self.angle)
    s = np.sin(self.angle)
    rotation = [[c, s], [-s, c]]
    shift = np.array([self.center.x, self.center.y])
    return np.dot(base, rotation) + shift
Return the x y coordinate pairs that define the corners
144
10
231,771
def to_polygon(self):
    """Return a 4-cornered polygon region equivalent to this rectangle."""
    xs, ys = self.corners.T
    vertex_coords = PixCoord(x=xs, y=ys)
    return PolygonPixelRegion(vertices=vertex_coords,
                              meta=self.meta, visual=self.visual)
Return a 4 - cornered polygon equivalent to this rectangle
57
12
231,772
def _lower_left_xy ( self ) : hw = self . width / 2. hh = self . height / 2. sint = np . sin ( self . angle ) cost = np . cos ( self . angle ) dx = ( hh * sint ) - ( hw * cost ) dy = - ( hh * cost ) - ( hw * sint ) x = self . center . x + dx y = self . center . y + dy return x , y
Compute lower left xy position .
105
8
231,773
def _make_annulus_path(patch_inner, patch_outer):
    """Build a single matplotlib annulus path from two patches.

    The outer boundary is kept as-is while the inner boundary is reversed,
    so the fill rule leaves the hole between them empty.
    """
    import matplotlib.path as mpath

    def transformed_path(patch):
        # Apply the patch's own transform to its raw path.
        return patch.get_transform().transform_path(patch.get_path())

    inner = transformed_path(patch_inner)
    outer = transformed_path(patch_outer)
    # Reverse the inner ring (dropping its closing vertex), then duplicate
    # its new last vertex so vertex and code counts stay aligned.
    inner_verts = inner.vertices[:-1][::-1]
    inner_verts = np.concatenate((inner_verts, [inner_verts[-1]]))
    verts = np.vstack((outer.vertices, inner_verts))
    codes = np.hstack((outer.codes, inner.codes))
    return mpath.Path(verts, codes)
Defines a matplotlib annulus path from two patches .
216
13
231,774
def read_fits_region(filename, errors='strict'):
    """Read a FITS file, scan it for region tables and convert them to
    sky `~regions.Region` objects."""
    regions = []
    hdul = fits.open(filename)
    for hdu in hdul:
        if hdu.name != 'REGION':
            continue
        table = Table.read(hdu)
        wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel'])
        parsed = FITSRegionParser(table, errors).shapes.to_regions()
        regions.extend(reg.to_sky(wcs) for reg in parsed)
    return regions
Reads a FITS region file and scans for any fits regions table and converts them into Region objects .
131
21
231,775
def to_shape_list(region_list, coordinate_system='fk5'):
    """Convert a list of regions into a `regions.ShapeList` object.

    For each region, the relevant attributes are collected, converted to
    plain quantities/angles in the requested frame, and wrapped in a
    ``Shape`` together with the region's meta/visual data.
    """
    shape_list = ShapeList()
    for region in region_list:
        coord = []
        # Derive the shape name from the class name by stripping the
        # suffix — presumably 'SkyRegion' (9 chars) vs 'PixelRegion'
        # (11 chars); confirm against the class naming convention.
        if isinstance(region, SkyRegion):
            reg_type = region.__class__.__name__[:-9].lower()
        else:
            reg_type = region.__class__.__name__[:-11].lower()
        for val in regions_attributes[reg_type]:
            coord.append(getattr(region, val))
        if reg_type == 'polygon':
            # Polygons are described purely by their vertex list.
            coord = [x for x in region.vertices]
        if coordinate_system:
            coordsys = coordinate_system
        else:
            # No explicit frame requested: infer it from the region itself.
            if isinstance(region, SkyRegion):
                coordsys = coord[0].name
            else:
                coordsys = 'image'
        frame = coordinates.frame_transform_graph.lookup_name(coordsys)
        new_coord = []
        for val in coord:
            if isinstance(val, Angle) or isinstance(val, u.Quantity) or isinstance(val, numbers.Number):
                new_coord.append(val)
            elif isinstance(val, PixCoord):
                # Pixel coordinates become two dimensionless quantities.
                new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled))
                new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled))
            else:
                # Sky coordinate: transform to the target frame and split
                # into longitude/latitude angles.
                new_coord.append(Angle(val.transform_to(frame).spherical.lon))
                new_coord.append(Angle(val.transform_to(frame).spherical.lat))
        meta = dict(region.meta)
        meta.update(region.visual)
        if reg_type == 'text':
            meta['text'] = meta.get('text', meta.pop('label', ''))
        include = region.meta.pop('include', True)
        shape_list.append(Shape(coordsys, reg_type, new_coord, meta,
                                False, include))
    return shape_list
Converts a list of regions into a regions . ShapeList object .
456
14
231,776
def to_ds9_meta(shape_meta):
    """Filter and rename meta keys so they are valid for the DS9 format."""
    # meta keys allowed in DS9, followed by the visual keys allowed in DS9
    valid_keys = ['symbol', 'include', 'tag', 'line', 'comment', 'name',
                  'select', 'highlite', 'fixed', 'label', 'text', 'edit',
                  'move', 'rotate', 'delete', 'source', 'background',
                  'color', 'dash', 'linewidth', 'font', 'dashlist', 'fill',
                  'textangle', 'symsize']
    # keys that DS9 knows under a different name
    key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'}
    meta = _to_io_meta(shape_meta, valid_keys, key_mappings)
    if 'font' in meta:
        # DS9 expects size/style/weight appended to the font name.
        meta['font'] += " {0} {1} {2}".format(shape_meta.get('fontsize', 12),
                                              shape_meta.get('fontstyle', 'normal'),
                                              shape_meta.get('fontweight', 'roman'))
    return meta
Makes the meta data DS9 compatible by filtering and mapping the valid keys
285
15
231,777
def _to_io_meta ( shape_meta , valid_keys , key_mappings ) : meta = dict ( ) for key in shape_meta : if key in valid_keys : meta [ key_mappings . get ( key , key ) ] = shape_meta [ key ] return meta
This is used to make meta data compatible with a specific io by filtering and mapping to it s valid keys
64
21
231,778
def convert_coords(self):
    """Process the raw list of coordinates into region-ready values."""
    pixel_frames = ('image', 'physical')
    if self.coordsys in pixel_frames:
        coords = self._convert_pix_coords()
    else:
        coords = self._convert_sky_coords()
    if self.region_type == 'line':
        # A line is described by its two endpoints.
        coords = [coords[0][0], coords[0][1]]
    if self.region_type == 'text':
        # The text payload rides along with the coordinates.
        coords.append(self.meta['text'])
    return coords
Process list of coordinates
120
4
231,779
def _convert_sky_coords(self):
    """Convert parsed angle pairs to sky coordinates.

    Returns a list whose first element is a `SkyCoord` built from the
    (lon, lat) angle pairs found in ``self.coord``; for non-polygon
    regions the remaining entries (e.g. radii) are appended unchanged.
    """
    # Pair consecutive entries where both halves are Angles: (lon, lat).
    parsed_angles = [(x, y)
                     for x, y in zip(self.coord[:-1:2], self.coord[1::2])
                     if (isinstance(x, coordinates.Angle) and
                         isinstance(y, coordinates.Angle))]
    frame = coordinates.frame_transform_graph.lookup_name(self.coordsys)
    lon, lat = zip(*parsed_angles)
    if hasattr(lon, '__len__') and hasattr(lat, '__len__') and len(lon) == 1 and len(lat) == 1:
        # force entries to be scalar if they are length-1
        lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0])
    else:
        # otherwise, they are vector quantities
        lon, lat = u.Quantity(lon), u.Quantity(lat)
    sphcoords = coordinates.UnitSphericalRepresentation(lon, lat)
    coords = [SkyCoord(frame(sphcoords))]
    if self.region_type != 'polygon':
        # Append the leftover non-angle parameters (each angle pair
        # consumed two raw entries, hence len(coords * 2)).
        coords += self.coord[len(coords * 2):]
    return coords
Convert to sky coordinates
279
5
231,780
def _convert_pix_coords(self):
    """Convert the raw coordinate list to pixel coordinates
    (`regions.PixCoord`) plus any scalar shape parameters."""
    if self.region_type in ['polygon', 'line']:
        # have to special-case polygon in the phys coord case
        # b/c can't typecheck when iterating as in sky coord case
        coords = [PixCoord(self.coord[0::2], self.coord[1::2])]
    else:
        temp = [_.value for _ in self.coord]
        coord = PixCoord(temp[0], temp[1])
        coords = [coord] + temp[2:]
    # The angle remains as a quantity object.
    # Modulus check makes sure that it works for ellipse/rectangle annulus
    if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0:
        coords[-1] = self.coord[-1]
    return coords
Convert to pixel coordinates regions . PixCoord
207
10
231,781
def to_region(self):
    """Convert this shape to a `regions.Region` object.

    Builds the appropriate sky or pixel region from the converted
    coordinates and splits the shape meta into visual vs. plain meta.
    """
    coords = self.convert_coords()
    log.debug(coords)
    # keys that belong in RegionVisual rather than RegionMeta
    viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize',
                    'symbol', 'symsize', 'fontsize', 'fontstyle', 'usetex',
                    'labelpos', 'labeloff', 'linewidth', 'linestyle',
                    'point', 'textangle', 'fontweight']
    if isinstance(coords[0], SkyCoord):
        reg = self.shape_to_sky_region[self.region_type](*coords)
    elif isinstance(coords[0], PixCoord):
        reg = self.shape_to_pixel_region[self.region_type](*coords)
    else:
        self._raise_error("No central coordinate")
    reg.visual = RegionVisual()
    reg.meta = RegionMeta()
    # both 'text' and 'label' should be set to the same value, where we
    # default to the 'text' value since that is the one used by ds9 regions
    label = self.meta.get('text', self.meta.get('label', ""))
    if label != '':
        reg.meta['label'] = label
    for key in self.meta:
        if key in viz_keywords:
            reg.visual[key] = self.meta[key]
        else:
            reg.meta[key] = self.meta[key]
    reg.meta['include'] = self.include
    return reg
Converts to region regions . Region object
364
8
231,782
def check_crtf(self):
    """Check whether this shape can be represented in the CRTF format.

    Raises
    ------
    ValueError
        If the region type or coordinate frame is not supported by CRTF.
    """
    if self.region_type not in regions_attributes:
        # BUG FIX: the two adjacent string literals concatenated without a
        # space, yielding "...in this packagesupported by CRTF".
        raise ValueError("'{0}' is not a valid region type in this package "
                         "supported by CRTF".format(self.region_type))
    if self.coordsys not in valid_coordsys['CRTF']:
        raise ValueError("'{0}' is not a valid coordinate reference frame in "
                         "astropy supported by CRTF".format(self.coordsys))
Checks for CRTF compatibility .
113
7
231,783
def check_ds9(self):
    """Check whether this shape can be represented in the DS9 format.

    Raises
    ------
    ValueError
        If the region type or coordinate frame is not supported by DS9.
    """
    if self.region_type not in regions_attributes:
        # BUG FIX: the two adjacent string literals concatenated without a
        # space, yielding "...in this packagesupported by DS9".
        raise ValueError("'{0}' is not a valid region type in this package "
                         "supported by DS9".format(self.region_type))
    if self.coordsys not in valid_coordsys['DS9']:
        raise ValueError("'{0}' is not a valid coordinate reference frame "
                         "in astropy supported by DS9".format(self.coordsys))
Checks for DS9 compatibility .
113
7
231,784
def _validate(self):
    """Check that all attributes of this shape are valid at all
    (region type known, coordinate frame known to DS9 or CRTF)."""
    if self.region_type not in regions_attributes:
        raise ValueError("'{0}' is not a valid region type in this "
                         "package".format(self.region_type))
    known_frames = valid_coordsys['DS9'] + valid_coordsys['CRTF']
    if self.coordsys not in known_frames:
        raise ValueError("'{0}' is not a valid coordinate reference frame "
                         "in astropy".format(self.coordsys))
Checks whether all the attributes of this object is valid .
114
12
231,785
def read_crtf(filename, errors='strict'):
    """Read a CRTF region file and return a list of region objects.

    The first line must carry the "#CRTF" magic marker.
    """
    with open(filename) as fh:
        first_line = fh.readline()
        if not regex_begin.search(first_line):
            raise CRTFRegionParserError('Every CRTF Region must start with "#CRTF" ')
        parser = CRTFParser(fh.read(), errors)
    return parser.shapes.to_regions()
Reads a CRTF region file and returns a list of region objects .
98
15
231,786
def parse_line(self, line):
    """Parse a single CRTF line: skip blanks/comments, absorb global
    parameters, otherwise delegate region parsing to CRTFRegionParser."""
    # Skip blanks
    if line == '':
        return
    # Skip comments
    if regex_comment.search(line):
        return
    # Special case / header: parse global parameters into metadata
    global_parameters = regex_global.search(line)
    if global_parameters:
        self.parse_global_meta(global_parameters.group('parameters'))
        return
    # Tries to check the validity of the line.
    crtf_line = regex_line.search(line)
    if crtf_line:
        # Tries to parse the line.
        # Finds info about the region.
        region = regex_region.search(crtf_line.group('region'))
        type_ = region.group('type') or 'reg'
        include = region.group('include') or '+'
        region_type = region.group('regiontype').lower()
        if region_type in self.valid_definition:
            helper = CRTFRegionParser(self.global_meta, include, type_,
                                      region_type,
                                      *crtf_line.group('region', 'parameters'))
            self.shapes.append(helper.shape)
        else:
            self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type))
    else:
        self._raise_error("Not a valid CRTF line: '{0}'.".format(line))
        return
Parses a single line .
319
7
231,787
def parse(self):
    """Starting point to parse the CRTF region string.

    Order matters: meta must be parsed first, since the coordinate frame
    and the coordinate conversion both depend on it.
    """
    self.convert_meta()
    # Default frame is 'image' when no 'coord' meta entry is present.
    self.coordsys = self.meta.get('coord', 'image').lower()
    self.set_coordsys()
    self.convert_coordinates()
    self.make_shape()
Starting point to parse the CRTF region string .
61
10
231,788
def set_coordsys(self):
    """Map the parsed frame name to astropy's coordinate-system name,
    leaving it unchanged when no mapping exists."""
    key = self.coordsys.lower()
    self.coordsys = self.coordsys_mapping.get(key, self.coordsys)
Mapping to astropy s coordinate system name
55
9
231,789
def convert_coordinates(self):
    """Convert the coordinate strings of ``self.reg_str`` to
    `~astropy.coordinates.Angle` / `~astropy.units.Quantity` objects.

    Validates the number/shape of the parsed values against the region's
    language spec, then converts each according to its attribute code:
    'c' coordinate pair, 'pl' pair of lengths, 'l' single length,
    's' string payload (symbol or text).
    """
    coord_list_str = regex_coordinate.findall(self.reg_str) + regex_length.findall(self.reg_str)
    coord_list = []
    if self.region_type == 'poly':
        # Polygons need at least 4 points and must be explicitly closed.
        if len(coord_list_str) < 4:
            self._raise_error('Not in proper format: {} polygon should have > 4 coordinates'.format(self.reg_str))
        if coord_list_str[0] != coord_list_str[-1]:
            self._raise_error("Not in proper format: '{0}', "
                              "In polygon, the last and first coordinates should be same".format(self.reg_str))
    else:
        # Every other region type has a fixed parameter count.
        if len(coord_list_str) != len(self.language_spec[self.region_type]):
            self._raise_error("Not in proper format: '{0}', "
                              "Does not contain expected number of parameters for the region '{1}'".format(self.reg_str, self.region_type))
    for attr_spec, val_str in zip(self.language_spec[self.region_type], coord_list_str):
        if attr_spec == 'c':
            # coordinate pair (lon, lat)
            if len(val_str) == 2 and val_str[1] != '':
                coord_list.append(CoordinateParser.parse_coordinate(val_str[0]))
                coord_list.append(CoordinateParser.parse_coordinate(val_str[1]))
            else:
                self._raise_error("Not in proper format: {0} should be a coordinate".format(val_str))
        if attr_spec == 'pl':
            # pair of angular lengths
            if len(val_str) == 2 and val_str[1] != '':
                coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[0]))
                coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[1]))
            else:
                self._raise_error("Not in proper format: {0} should be a pair of length".format(val_str))
        if attr_spec == 'l':
            # single angular length
            if isinstance(val_str, six.string_types):
                coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str))
            else:
                self._raise_error("Not in proper format: {0} should be a single length".format(val_str))
        if attr_spec == 's':
            # string payload: a point symbol or (quoted) text content
            if self.region_type == 'symbol':
                if val_str in valid_symbols:
                    self.meta['symbol'] = val_str
                else:
                    self._raise_error("Not in proper format: '{0}' should be a symbol".format(val_str))
            elif self.region_type == 'text':
                # strip the surrounding quote characters
                self.meta['text'] = val_str[1:-1]
    self.coord = coord_list
Convert coordinate string to ~astropy . coordinates . Angle or ~astropy . units . quantity . Quantity objects
697
23
231,790
def convert_meta(self):
    """Parse ``meta_str`` into the ``meta`` dict.

    Also normalizes the ``include`` flag and converts ``range`` entries to
    `~astropy.units.Quantity`; unknown keys are reported via
    ``_raise_error``.
    """
    if self.meta_str:
        self.meta_str = regex_meta.findall(self.meta_str + ',')
    if self.meta_str:
        for par in self.meta_str:
            # BUG FIX: the original tested `par[0] is not ''` — an identity
            # comparison with a string literal, which is a SyntaxWarning in
            # modern Python and relies on CPython interning; use equality.
            if par[0] != '':
                val1 = par[0]
                val2 = par[1]
            else:
                val1 = par[2]
                val2 = par[3]
            val1 = val1.strip()
            val2 = val2.strip()
            if val1 in CRTFParser.valid_global_keys or val1 == 'label':
                if val1 in ('range', 'corr', 'labeloff'):
                    # These keys hold comma-separated value lists.
                    val2 = val2.split(',')
                    val2 = [x.strip() for x in val2]
                self.meta[val1] = val2
            else:
                self._raise_error("'{0}' is not a valid meta key".format(val1))
    self.meta['include'] = self.include != '-'
    self.include = self.meta['include']
    if 'range' in self.meta:
        self.meta['range'] = [u.Quantity(x) for x in self.meta['range']]
    self.meta['type'] = self.type_
Parses the meta_str to python dictionary and stores in meta attribute .
293
16
231,791
def fits_region_objects_to_table(regions):
    """Convert a list of pixel regions to a FITS region table.

    Raises
    ------
    TypeError
        If any entry is a sky region instead of a pixel region.
    """
    for reg in regions:
        if isinstance(reg, SkyRegion):
            # BUG FIX: the message had no '{}' placeholder, so the
            # .format(reg) argument was silently dropped from the text.
            raise TypeError('Every region must be a pixel region, got {}'.format(reg))
    shape_list = to_shape_list(regions, coordinate_system='image')
    return shape_list.to_fits()
Converts list of regions to FITS region table .
78
11
231,792
def write_fits_region(filename, regions, header=None):
    """Convert *regions* to a FITS region table and write it to *filename*."""
    table = fits_region_objects_to_table(regions)
    hdu = fits.BinTableHDU(data=table, header=header)
    hdu.writeto(filename)
Converts list of regions to FITS region table and write to a file .
58
16
231,793
def make_example_dataset(data='simulated', config=None):
    """Make an example dataset: either 'simulated' or 'fermi'.

    Raises
    ------
    ValueError
        If *data* is neither 'simulated' nor 'fermi'.
    """
    if data == 'simulated':
        return ExampleDatasetSimulated(config=config)
    if data == 'fermi':
        return ExampleDatasetFermi(config=config)
    raise ValueError('Invalid selection data: {}'.format(data))
Make example dataset .
82
4
231,794
def _table_to_bintable(table):
    """Convert `~astropy.table.Table` to `astropy.io.fits.BinTableHDU`."""
    data = table.as_array()
    header = fits.Header()
    header.update(table.meta)
    # NOTE(review): 'name' is popped *after* header.update, so the header
    # may still carry the key, and the pop mutates the caller's
    # table.meta — confirm this side effect is intended.
    name = table.meta.pop('name', None)
    return fits.BinTableHDU(data, header, name=name)
Convert ~astropy . table . Table to astropy . io . fits . BinTable .
67
20
231,795
def read_ds9(filename, errors='strict'):
    """Read a DS9 region file as a list of `~regions.Region` objects."""
    with open(filename) as fh:
        contents = fh.read()
    parser = DS9Parser(contents, errors=errors)
    return parser.shapes.to_regions()
Read a DS9 region file in as a list of ~regions . Region objects .
60
18
231,796
def set_coordsys(self, coordsys):
    """Store *coordsys*, translated through ``coordsys_mapping`` when a
    mapping exists, otherwise verbatim."""
    self.coordsys = self.coordsys_mapping.get(coordsys, coordsys)
Transform coordinate system
58
3
231,797
def run(self):
    """Run all parsing steps: feed every newline- and semicolon-separated
    statement of the region string to ``parse_line``."""
    for physical_line in self.region_string.split('\n'):
        for statement in physical_line.split(";"):
            self.parse_line(statement)
    log.debug('Global state: {}'.format(self))
Run all steps
60
3
231,798
def parse_meta(meta_str):
    """Parse the metadata of a single ds9 region string into an OrderedDict.

    'tag' entries accumulate into a list; any other duplicated key raises
    ``ValueError``. Trailing unmatched text becomes the 'comment' entry.
    """
    stripped = meta_str.strip()
    keys_vals = [(key, val) for key, _, val in regex_meta.findall(stripped)]
    extra_text = regex_meta.split(stripped)[-1]
    result = OrderedDict()
    for key, val in keys_vals:
        # regex can include trailing whitespace or inverted commas — remove
        val = val.strip().strip("'").strip('"')
        if key == 'text':
            val = val.lstrip("{").rstrip("}")
        if key == 'tag':
            result.setdefault(key, []).append(val)
        elif key in result:
            raise ValueError("Duplicate key {0} found".format(key))
        else:
            result[key] = val
    if extra_text:
        result['comment'] = extra_text
    return result
Parse the metadata for a single ds9 region string .
224
13
231,799
def parse_region(self, include, region_type, region_end, line):
    """Extract a Shape from a region string, delegating to a
    DS9RegionParser helper and collecting the resulting shape."""
    if self.coordsys is None:
        raise DS9RegionParserError("No coordinate system specified and a"
                                   " region has been found.")
    helper = DS9RegionParser(coordsys=self.coordsys,
                             include=include,
                             region_type=region_type,
                             region_end=region_end,
                             global_meta=self.global_meta,
                             line=line)
    helper.parse()
    self.shapes.append(helper.shape)