idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
231,600
def seek(self, offset, whence=SEEK_SET):
    """Seek pointer in lob data buffer to requested position.

    Might trigger further loading of data from the database if the
    pointer is beyond currently read data.
    """
    # (Ab)use BytesIO.seek() to compute the desired absolute position -
    # conveniently this adds no data to the buffer.
    self.data.seek(offset, whence)
    new_pos = self.data.tell()
    missing_bytes_to_read = new_pos - self._current_lob_length
    if missing_bytes_to_read > 0:
        # Seeking beyond currently available LOB data, so load more first.
        # Heuristic: a user seeking here probably wants to read from that
        # position, so fetch EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK extra
        # items (bytes/chars) to avoid another immediate read round trip.
        self.data.seek(0, SEEK_END)
        self.read(missing_bytes_to_read + self.EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK)
        # Reposition file pointer at the originally desired position:
        self.data.seek(new_pos)
    return new_pos
Seek pointer in lob data buffer to requested position . Might trigger further loading of data from the database if the pointer is beyond currently read data .
287
29
231,601
def _read_missing_lob_data_from_db(self, readoffset, readlength):
    """Read missing LOB data from the database and append it to the buffer.

    :param readoffset: item offset at which reading starts
    :param readlength: number of items (bytes/chars, not bytes only!) to read

    Fixes: lazy logger arguments instead of eager %-interpolation, typo in
    the assertion message ('received;' -> 'received:'), and removal of a
    leftover commented-out pdb call.
    """
    logger.debug('Reading missing lob data from db. Offset: %d, readlength: %d',
                 readoffset, readlength)
    lob_data = self._make_read_lob_request(readoffset, readlength)
    # Make sure we really got as many items (not bytes!) as requested:
    enc_lob_data = self._decode_lob_data(lob_data)
    assert readlength == len(enc_lob_data), \
        'expected: %d, received: %d' % (readlength, len(enc_lob_data))
    # Jump to the end of the buffer and append the properly decoded data:
    self.data.seek(0, SEEK_END)
    self.data.write(enc_lob_data)
    self._current_lob_length = len(self.data.getvalue())
Read LOB request part from database
234
7
231,602
def _init_io_container(self, init_value):
    """Initialize the container holding lob data.

    Either a cStringIO (PY2) or an io.StringIO (PY3) class is used.
    For CLobs an initial unicode value must only contain valid ascii
    chars.
    """
    if isinstance(init_value, CLOB_STRING_IO_CLASSES):
        # Already a valid StringIO instance, use it as-is.
        return init_value
    # Works for strings and unicodes; unicodes must only contain ascii.
    if PY3:
        # io.StringIO accepts any unicode characters, so explicitly check
        # that only ascii chars are contained. In PY2 the cStringIO class
        # complains by itself, so no extra check is needed there.
        init_value.encode('ascii')  # validation only, result not needed
    return CLOB_STRING_IO(init_value)
Initialize container to hold lob data . Here either a cStringIO or a io . StringIO class is used depending on the Python version . For CLobs ensure that an initial unicode value only contains valid ascii chars .
184
47
231,603
def _handle_upsert(self, parts, unwritten_lobs=()):
    """Handle reply messages from INSERT or UPDATE statements."""
    self.description = None
    # 'True' makes cursor.fetch*() return just an empty list:
    self._received_last_resultset_part = True
    ignored_kinds = (part_kinds.TRANSACTIONFLAGS,
                     part_kinds.STATEMENTCONTEXT,
                     part_kinds.PARAMETERMETADATA)
    for part in parts:
        if part.kind == part_kinds.ROWSAFFECTED:
            self.rowcount = part.values[0]
        elif part.kind in ignored_kinds:
            pass
        elif part.kind == part_kinds.WRITELOBREPLY:
            # This part occurs after lobs have been submitted not at all or
            # only partially during an insert. The parameter part of the
            # Request message then contains a list 'unwritten_lobs' of
            # LobBuffer instances, in the same order as the 'locator_ids'
            # received here. These IDs are used to deliver the missing LOB
            # data to the server via WRITE_LOB_REQUESTs.
            for lob_buffer, lob_locator_id in izip(unwritten_lobs, part.locator_ids):
                # Store locator_id in every lob buffer for later reference:
                lob_buffer.locator_id = lob_locator_id
            self._perform_lob_write_requests(unwritten_lobs)
        else:
            raise InterfaceError(
                "Prepared insert statement response, unexpected part kind %d." % part.kind)
    self._executed = True
Handle reply messages from INSERT or UPDATE statements
358
9
231,604
def _handle_select(self, parts, result_metadata=None):
    """Handle reply messages from SELECT statements."""
    self.rowcount = -1
    if result_metadata is not None:
        # Select was prepared; reuse the already received metadata.
        self.description, self._column_types = \
            self._handle_result_metadata(result_metadata)
    ignored_kinds = (part_kinds.STATEMENTCONTEXT,
                     part_kinds.TRANSACTIONFLAGS,
                     part_kinds.PARAMETERMETADATA)
    for part in parts:
        if part.kind == part_kinds.RESULTSETID:
            self._resultset_id = part.value
        elif part.kind == part_kinds.RESULTSETMETADATA:
            self.description, self._column_types = \
                self._handle_result_metadata(part)
        elif part.kind == part_kinds.RESULTSET:
            self._buffer = part.unpack_rows(self._column_types, self.connection)
            self._received_last_resultset_part = part.attribute & 1
            self._executed = True
        elif part.kind in ignored_kinds:
            pass
        else:
            raise InterfaceError(
                "Prepared select statement response, unexpected part kind %d." % part.kind)
Handle reply messages from SELECT statements
267
6
231,605
def _handle_dbproc_call(self, parts, parameters_metadata):
    """Handle reply messages from STORED PROCEDURE statements."""
    for part in parts:
        if part.kind == part_kinds.ROWSAFFECTED:
            self.rowcount = part.values[0]
        elif part.kind in (part_kinds.TRANSACTIONFLAGS,
                           part_kinds.STATEMENTCONTEXT):
            pass
        elif part.kind == part_kinds.OUTPUTPARAMETERS:
            self._buffer = part.unpack_rows(parameters_metadata, self.connection)
            self._received_last_resultset_part = True
            self._executed = True
        elif part.kind == part_kinds.RESULTSETMETADATA:
            self.description, self._column_types = \
                self._handle_result_metadata(part)
        elif part.kind == part_kinds.RESULTSETID:
            self._resultset_id = part.value
        elif part.kind == part_kinds.RESULTSET:
            self._buffer = part.unpack_rows(self._column_types, self.connection)
            self._received_last_resultset_part = part.attribute & 1
            self._executed = True
        else:
            raise InterfaceError(
                "Stored procedure call, unexpected part kind %d." % part.kind)
    # NOTE(review): also set unconditionally inside some branches above;
    # kept for behavioral fidelity.
    self._executed = True
Handle reply messages from STORED PROCEDURE statements
302
10
231,606
def allhexlify(data):
    """Hexlify given data into a string representation with hex values
    for all chars.

    Input like ``ab\\x04ce`` becomes ``\\x61\\x62\\x04\\x63\\x65``.
    """
    hexed = binascii.hexlify(data)
    pairs = (hexed[i:i + 2] for i in range(0, len(hexed), 2))
    return b''.join(b'\\x' + pair for pair in pairs)
Hexlify given data into a string representation with hex values for all chars. Input like `ab\x04ce` becomes `\x61\x62\x04\x63\x65`.
55
39
231,607
def pack(self, remaining_size):
    """Pack data of part into binary format."""
    arguments_count, payload = self.pack_data(remaining_size - self.header_size)
    payload_length = len(payload)
    # Align payload length to a multiple of 8:
    padding = -payload_length % 8
    if padding:
        payload += b"\x00" * padding
    self.header = PartHeader(self.kind, self.attribute, arguments_count,
                             self.bigargumentcount, payload_length,
                             remaining_size)
    hdr = self.header_struct.pack(*self.header)
    if pyhdb.tracing:
        self.trace_header = humanhexlify(hdr, 30)
        self.trace_payload = humanhexlify(payload, 30)
    return hdr + payload
Pack data of part into binary format
168
7
231,608
def unpack_from(cls, payload, expected_parts):
    """Unpack parts from payload. Generator yielding one part at a time."""
    for num_part in iter_range(expected_parts):
        hdr = payload.read(cls.header_size)
        try:
            part_header = PartHeader(*cls.header_struct.unpack(hdr))
        except struct.error:
            raise InterfaceError("No valid part header")
        # Part payloads are 8-byte aligned within the message payload:
        part_payload_size = part_header.payload_size + \
            (-part_header.payload_size % 8)
        pl = payload.read(part_payload_size)
        part_payload = io.BytesIO(pl)
        try:
            _PartClass = PART_MAPPING[part_header.part_kind]
        except KeyError:
            raise InterfaceError("Unknown part kind %s" % part_header.part_kind)
        debug('%s (%d/%d): %s', _PartClass.__name__, num_part + 1,
              expected_parts, str(part_header))
        debug('Read %d bytes payload for part %d',
              part_payload_size, num_part + 1)
        init_arguments = _PartClass.unpack_data(part_header.argument_count,
                                                part_payload)
        debug('Part data: %s', init_arguments)
        part = _PartClass(*init_arguments)
        part.header = part_header
        part.attribute = part_header.part_attributes
        part.source = 'server'
        if pyhdb.tracing:
            part.trace_header = humanhexlify(hdr[:part_header.payload_size])
            part.trace_payload = humanhexlify(pl, 30)
        yield part
Unpack parts from payload
407
5
231,609
def pack_data(self, remaining_size):
    """Pack data.

    readoffset has to be increased by one; it seems HANA starts counting
    from 1, not zero.
    """
    packed = self.part_struct.pack(self.locator_id, self.readoffset + 1,
                                   self.readlength, b' ')
    return 4, packed
Pack data. `readoffset` has to be increased by one; it seems HANA starts counting from 1, not zero.
49
21
231,610
def build_payload(self, payload):
    """Build payload of message by packing every segment into it."""
    commit = self.autocommit
    for segment in self.segments:
        segment.pack(payload, commit=commit)
Build payload of message .
33
5
231,611
def pack(self):
    """Pack message to binary stream."""
    payload = io.BytesIO()
    # Reserve room for the header - it is written after the payload of
    # all segments and parts has been produced:
    payload.seek(self.header_size, io.SEEK_CUR)
    self.build_payload(payload)
    packet_length = len(payload.getvalue()) - self.header_size
    self.header = MessageHeader(self.session_id, self.packet_count,
                                packet_length, constants.MAX_SEGMENT_SIZE,
                                num_segments=len(self.segments),
                                packet_options=0)
    packed_header = self.header_struct.pack(*self.header)
    # Rewind to the beginning of the payload to write the message header:
    payload.seek(0)
    payload.write(packed_header)
    payload.seek(0, io.SEEK_END)
    trace(self)
    return payload
Pack message to binary stream .
211
6
231,612
def check_specs(specs, renamings, types):
    """Does nothing but raise PythranSyntaxError if specs are incompatible
    with the actual code."""
    from pythran.types.tog import unify, clone, tr
    from pythran.types.tog import Function, TypeVariable, InferenceError
    functions = {renamings.get(k, k): v for k, v in specs.functions.items()}
    for fname, signatures in functions.items():
        ftype = types[fname]
        for signature in signatures:
            sig_type = Function([tr(p) for p in signature], TypeVariable())
            try:
                unify(clone(sig_type), clone(ftype))
            except InferenceError:
                raise PythranSyntaxError(
                    "Specification for `{}` does not match inferred type:\n"
                    "expected `{}`\n"
                    "got `Callable[[{}], ...]`".format(
                        fname, ftype,
                        ", ".join(map(str, sig_type.types[:-1]))))
Does nothing but raise PythranSyntaxError if specs are incompatible with the actual code.
224
17
231,613
def check_exports(mod, specs, renamings):
    """Does nothing but raise PythranSyntaxError if specs reference an
    undefined global."""
    functions = {renamings.get(k, k): v for k, v in specs.functions.items()}
    mod_functions = {n.name: n for n in mod.body
                     if isinstance(n, ast.FunctionDef)}
    for fname, signatures in functions.items():
        try:
            fnode = mod_functions[fname]
        except KeyError:
            raise PythranSyntaxError(
                "Invalid spec: exporting undefined function `{}`".format(fname))
        # Arity bounds are loop-invariant; compute them once.
        max_args = len(fnode.args.args)
        min_args = max_args - len(fnode.args.defaults)
        for signature in signatures:
            if len(signature) > max_args:
                raise PythranSyntaxError(
                    "Too many arguments when exporting `{}`".format(fname))
            if len(signature) < min_args:
                raise PythranSyntaxError(
                    "Not enough arguments when exporting `{}`".format(fname))
Does nothing but raise PythranSyntaxError if specs reference an undefined global.
226
15
231,614
def visit_Import(self, node):
    """Check if imported module exists in MODULES."""
    for alias in node.names:
        current_module = MODULES
        # Recursive check for submodules:
        for path in alias.name.split('.'):
            if path not in current_module:
                raise PythranSyntaxError(
                    "Module '{0}' unknown.".format(alias.name), node)
            current_module = current_module[path]
Check if imported module exists in MODULES .
90
10
231,615
def visit_ImportFrom(self, node):
    """Check validity of imported functions."""
    if node.level:
        raise PythranSyntaxError("Relative import not supported", node)
    if not node.module:
        raise PythranSyntaxError("import from without module", node)
    module = node.module
    current_module = MODULES
    # Check that the module exists:
    for path in module.split('.'):
        if path not in current_module:
            raise PythranSyntaxError(
                "Module '{0}' unknown.".format(module), node)
        current_module = current_module[path]
    # Check that the imported functions exist ('*' is always accepted):
    for alias in node.names:
        if alias.name != '*' and alias.name not in current_module:
            raise PythranSyntaxError(
                "identifier '{0}' not found in module '{1}'".format(
                    alias.name, module),
                node)
Check validity of imported functions .
197
6
231,616
def uncamel(name):
    """Transform CamelCase naming convention into C-ish convention."""
    first_pass = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', first_pass).lower()
Transform CamelCase naming convention into C - ish convention .
77
12
231,617
def verify_dependencies(self):
    """Check that no analysis is called before a transformation, as the
    transformation could invalidate the analysis."""
    for previous, current in zip(self.deps, self.deps[1:]):
        assert not (isinstance(current, Transformation) and
                    isinstance(previous, Analysis)), \
            "invalid dep order for %s" % self
Checks that no analysis is called before a transformation, as the transformation could invalidate the analysis.
72
18
231,618
def prepare(self, node):
    """Gather analysis results required by this analysis."""
    if isinstance(node, ast.Module):
        self.ctx.module = node
    elif isinstance(node, ast.FunctionDef):
        self.ctx.function = node
    for dep_class in self.deps:
        dependency = dep_class()
        dependency.attach(self.passmanager, self.ctx)
        # Store the result as an attribute named after the dependency:
        setattr(self, uncamel(dep_class.__name__), dependency.run(node))
Gather analysis result required by this analysis
100
8
231,619
def run(self, node):
    """Apply transformation and dependencies and fix new node location."""
    new_node = super(Transformation, self).run(node)
    if self.update:
        ast.fix_missing_locations(new_node)
        # The AST changed, so cached analyses may be stale:
        self.passmanager._cache.clear()
    return new_node
Apply transformation and dependencies and fix new node location .
50
10
231,620
def apply(self, node):
    """Apply transformation and return whether an update happened."""
    # run() must execute first: it may set self.update as a side effect.
    transformed = self.run(node)
    return self.update, transformed
Apply transformation and return if an update happened .
26
9
231,621
def gather(self, analysis, node):
    """High-level function to call an analysis on a node."""
    assert issubclass(analysis, Analysis)
    instance = analysis()
    instance.attach(self)
    return instance.run(node)
High - level function to call an analysis on a node
37
11
231,622
def dump(self, backend, node):
    """High-level function to call a backend on a node to generate code."""
    assert issubclass(backend, Backend)
    instance = backend()
    instance.attach(self)
    return instance.run(node)
High - level function to call a backend on a node to generate code for module module_name .
38
20
231,623
def apply(self, transformation, node):
    """High-level function to call a transformation on a node.

    If the transformation is an analysis, the result of the analysis
    is displayed.
    """
    assert issubclass(transformation, (Transformation, Analysis))
    instance = transformation()
    instance.attach(self)
    result = instance.apply(node)
    # The transformation updated the AST, so cached analyses may be stale.
    # A finer-grain caching system could flag some analyses as
    # `unmodified' by the transformation, as done in LLVM (and PIPS ;-).
    if instance.update:
        self._cache.clear()
    return result
High - level function to call a transformation on a node . If the transformation is an analysis the result of the analysis is displayed .
113
26
231,624
def pytype_to_ctype(t):
    """Python -> pythonic type binding."""
    if isinstance(t, List):
        return 'pythonic::types::list<{0}>'.format(
            pytype_to_ctype(t.__args__[0]))
    if isinstance(t, Set):
        return 'pythonic::types::set<{0}>'.format(
            pytype_to_ctype(t.__args__[0]))
    if isinstance(t, Dict):
        key_type, value_type = t.__args__
        return 'pythonic::types::dict<{0},{1}>'.format(
            pytype_to_ctype(key_type), pytype_to_ctype(value_type))
    if isinstance(t, Tuple):
        return 'decltype(pythonic::types::make_tuple({0}))'.format(
            ", ".join('std::declval<{}>()'.format(pytype_to_ctype(p))
                      for p in t.__args__))
    if isinstance(t, NDArray):
        dtype = pytype_to_ctype(t.__args__[0])
        ndim = len(t.__args__) - 1
        shapes = ','.join(('long' if s.stop == -1 or s.stop is None
                           else 'std::integral_constant<long, {}>'.format(s.stop))
                          for s in t.__args__[1:])
        pshape = 'pythonic::types::pshape<{0}>'.format(shapes)
        arr = 'pythonic::types::ndarray<{0},{1}>'.format(dtype, pshape)
        if t.__args__[1].start == -1:
            # start == -1 on the first axis marks a transposed array:
            return 'pythonic::types::numpy_texpr<{0}>'.format(arr)
        if any(s.step is not None and s.step < 0 for s in t.__args__[1:]):
            slices = ", ".join(['pythonic::types::normalized_slice'] * ndim)
            return 'pythonic::types::numpy_gexpr<{0},{1}>'.format(arr, slices)
        return arr
    if isinstance(t, Pointer):
        return 'pythonic::types::pointer<{0}>'.format(
            pytype_to_ctype(t.__args__[0]))
    if isinstance(t, Fun):
        return 'pythonic::types::cfun<{0}({1})>'.format(
            pytype_to_ctype(t.__args__[-1]),
            ", ".join(pytype_to_ctype(arg) for arg in t.__args__[:-1]))
    if t in PYTYPE_TO_CTYPE_TABLE:
        return PYTYPE_TO_CTYPE_TABLE[t]
    raise NotImplementedError("{0}:{1}".format(type(t), t))
Python - > pythonic type binding .
703
8
231,625
def pytype_to_pretty_type(t):
    """Python -> docstring type."""
    if isinstance(t, List):
        return '{0} list'.format(pytype_to_pretty_type(t.__args__[0]))
    if isinstance(t, Set):
        return '{0} set'.format(pytype_to_pretty_type(t.__args__[0]))
    if isinstance(t, Dict):
        key_type, value_type = t.__args__
        return '{0}:{1} dict'.format(pytype_to_pretty_type(key_type),
                                     pytype_to_pretty_type(value_type))
    if isinstance(t, Tuple):
        return '({0})'.format(
            ", ".join(pytype_to_pretty_type(p) for p in t.__args__))
    if isinstance(t, NDArray):
        dtype = pytype_to_pretty_type(t.__args__[0])
        ndim = len(t.__args__) - 1
        arr = '{0}[{1}]'.format(
            dtype,
            ','.join(':' if s.stop in (-1, None) else str(s.stop)
                     for s in t.__args__[1:]))
        if t.__args__[1].start == -1:
            # it's a transpose!
            return '{} order(F)'.format(arr)
        if any(s.step is not None and s.step < 0 for s in t.__args__[1:]):
            return '{0}[{1}]'.format(dtype, ','.join(['::'] * ndim))
        return arr
    if isinstance(t, Pointer):
        return '{}*'.format(pytype_to_pretty_type(t.__args__[0]))
    if isinstance(t, Fun):
        return_type = pytype_to_pretty_type(t.__args__[-1])
        arg_types = [pytype_to_pretty_type(arg) for arg in t.__args__[:-1]]
        return '{}({})'.format(return_type, ", ".join(arg_types))
    if t in PYTYPE_TO_CTYPE_TABLE:
        return t.__name__
    raise NotImplementedError("{0}:{1}".format(type(t), t))
Python - > docstring type .
573
7
231,626
def get_type(name, env, non_generic):
    """Get the type of identifier name from the type environment env."""
    try:
        bound = env[name]
    except KeyError:
        print("W: Undefined symbol {0}".format(name))
        return TypeVariable()
    if isinstance(bound, MultiType):
        return clone(bound)
    return fresh(bound, non_generic)
Get the type of identifier name from the type environment env .
76
12
231,627
def fresh(t, non_generic):
    """Makes a copy of a type expression.

    Generic type variables are duplicated; non-generic ones are shared.
    """
    mappings = {}  # A mapping of TypeVariables to TypeVariables

    def duplicate(tp):
        pruned = prune(tp)
        if isinstance(pruned, TypeVariable):
            if not is_generic(pruned, non_generic):
                return pruned
            if pruned not in mappings:
                mappings[pruned] = TypeVariable()
            return mappings[pruned]
        if isinstance(pruned, dict):
            return pruned  # module
        if isinstance(pruned, Collection):
            return Collection(*[duplicate(x) for x in pruned.types])
        if isinstance(pruned, Scalar):
            return Scalar([duplicate(x) for x in pruned.types])
        if isinstance(pruned, TypeOperator):
            return TypeOperator(pruned.name, [duplicate(x) for x in pruned.types])
        if isinstance(pruned, MultiType):
            return MultiType([duplicate(x) for x in pruned.types])
        assert False, "missing freshrec case {}".format(type(pruned))

    return duplicate(t)
Makes a copy of a type expression .
252
9
231,628
def prune(t):
    """Returns the currently defining instance of t, collapsing chains of
    instantiated type variables along the way (path compression)."""
    if isinstance(t, TypeVariable) and t.instance is not None:
        t.instance = prune(t.instance)
        return t.instance
    return t
Returns the currently defining instance of t .
42
8
231,629
def occurs_in_type(v, type2):
    """Checks whether a type variable occurs in a type expression."""
    pruned = prune(type2)
    if pruned == v:
        return True
    if isinstance(pruned, TypeOperator):
        return occurs_in(v, pruned.types)
    return False
Checks whether a type variable occurs in a type expression .
69
12
231,630
def visit_Module(self, node):
    """Visit the whole module and add all imports at the top level."""
    node.body = [stmt for stmt in (self.visit(child) for child in node.body)
                 if stmt]
    import_nodes = [ast.Import([ast.alias(name, mangle(name))])
                    for name in self.imports]
    node.body = import_nodes + node.body
    ast.fix_missing_locations(node)
    return node
Visit the whole module and add all import at the top level .
87
13
231,631
def visit_Name(self, node):
    """Replace name with full expanded name."""
    if node.id not in self.symbols:
        return node
    symbol = path_to_node(self.symbols[node.id])
    if getattr(symbol, 'isliteral', lambda: False)():
        return node
    parent = self.ancestors[node][-1]
    # Expansion inside these constructs is not supported:
    if isinstance(parent, (ast.Tuple, ast.List, ast.Set, ast.Return)):
        raise PythranSyntaxError(
            "Unsupported module identifier manipulation", node)
    new_node = path_to_attr(self.symbols[node.id])
    new_node.ctx = node.ctx
    ast.copy_location(new_node, node)
    return new_node
Replace name with full expanded name .
162
8
231,632
def save_function_effect(module):
    """Recursively save function effect for pythonic functions."""
    for intr in module.values():
        if isinstance(intr, dict):
            # Submodule case
            save_function_effect(intr)
            continue
        IntrinsicArgumentEffects[intr] = FunctionEffects(intr)
        if isinstance(intr, intrinsic.Class):
            save_function_effect(intr.fields)
Recursively save function effect for pythonic functions .
82
11
231,633
def prepare(self, node):
    """Initialise argument effects, as this analysis is inter-procedural."""
    super(ArgumentEffects, self).prepare(node)
    for declaration in self.global_declarations.values():
        effects = FunctionEffects(declaration)
        self.node_to_functioneffect[declaration] = effects
        self.result.add_node(effects)
Initialise argument effects, as this analysis is inter-procedural.
64
12
231,634
def process_locals(self, node, node_visited, *skipped):
    """Declare variables local to node and insert declarations before it."""
    local_vars = self.scope[node].difference(skipped)
    local_vars = local_vars.difference(self.openmp_deps)
    if not local_vars:
        return node_visited  # no processing needed
    declarations = [Statement("{} {}".format(self.typeof(varname), varname))
                    for varname in local_vars]
    self.ldecls.difference_update(local_vars)
    return Block(declarations + [node_visited])
Declare variable local to node and insert declaration before .
156
11
231,635
def process_omp_attachements(self, node, stmt, index=None):
    """Add OpenMP pragmas on the correct stmt, in the correct order."""
    omp_directives = metadata.get(node, OMPDirective)
    if not omp_directives:
        return stmt
    directives = []
    for directive in omp_directives:
        # Visit the directive's dependencies so they are code-generated too:
        directive.deps = [self.visit(dep) for dep in directive.deps]
        directives.append(directive)
    if index is None:
        return AnnotatedStatement(stmt, directives)
    stmt[index] = AnnotatedStatement(stmt[index], directives)
    return stmt
Add OpenMP pragma on the correct stmt in the correct order .
127
15
231,636
def visit_Assign(self, node):
    """Create Assign node for final Cxx representation."""
    if not all(isinstance(n, (ast.Name, ast.Subscript)) for n in node.targets):
        raise PythranSyntaxError(
            "Must assign to an identifier or a subscript", node)
    value = self.visit(node.value)
    targets = [self.visit(t) for t in node.targets]
    alltargets = "= ".join(targets)
    first_target = node.targets[0]
    islocal = (len(targets) == 1 and
               isinstance(first_target, ast.Name) and
               first_target.id in self.scope[node] and
               first_target.id not in self.openmp_deps)
    if islocal:
        # Remove these decls from local decls:
        self.ldecls.difference_update(t.id for t in node.targets)
        # Add a local declaration:
        if self.types[first_target].iscombined():
            alltargets = '{} {}'.format(self.typeof(first_target), alltargets)
        elif isinstance(self.types[first_target],
                        self.types.builder.Assignable):
            alltargets = '{} {}'.format(
                self.types.builder.Assignable(
                    self.types.builder.NamedType(
                        'decltype({})'.format(value))),
                alltargets)
        else:
            assert isinstance(self.types[first_target],
                              self.types.builder.Lazy)
            alltargets = '{} {}'.format(
                self.types.builder.Lazy(
                    self.types.builder.NamedType(
                        'decltype({})'.format(value))),
                alltargets)
    stmt = Assign(alltargets, value)
    return self.process_omp_attachements(node, stmt)
Create Assign node for final Cxx representation .
431
10
231,637
def gen_for(self, node, target, local_iter, local_iter_decl, loop_body):
    """Create For representation on iterator for Cxx generation."""
    # Choose a target variable holding the iterator (iterator type):
    local_target = "__target{0}".format(id(node))
    local_target_decl = self.types.builder.IteratorOfType(local_iter_decl)
    # If the variable is local to the for body, bind it as a reference to
    # the iterator value type:
    if node.target.id in self.scope[node] and not hasattr(self, 'yields'):
        local_type = "auto&&"
    else:
        local_type = ""
    # Assign the iterable value at the top of the body:
    body_prelude = Statement(
        "{} {}= *{}".format(local_type, target, local_target))
    # Create the loop:
    assign = self.make_assign(local_target_decl, local_target, local_iter)
    loop = For("{}.begin()".format(assign),
               "{0} < {1}.end()".format(local_target, local_iter),
               "++{0}".format(local_target),
               Block([body_prelude, loop_body]))
    return [self.process_omp_attachements(node, loop)]
Create For representation on iterator for Cxx generation .
288
10
231,638
def handle_real_loop_comparison(self, args, target, upper_bound):
    """Handle comparison for real loops.

    The loop direction is 1 for an increasing loop, -1 for a decreasing
    one, and 0 when it is not known at compile time.
    """
    if len(args) <= 2:
        order = 1
    elif isinstance(args[2], ast.Num):
        order = 1 if int(args[2].n) > 0 else -1
    elif isinstance(args[1], ast.Num) and isinstance(args[0], ast.Num):
        order = 1 if int(args[1].n) > int(args[0].n) else -1
    else:
        order = 0
    template = "{} < {}" if order == 1 else "{} > {}"
    return template.format(target, upper_bound)
Handle comparison for real loops .
184
6
231,639
def gen_c_for(self, node, local_iter, loop_body):
    """Create C For representation for Cxx generation."""
    args = node.iter.args
    step = "1L" if len(args) <= 2 else self.visit(args[2])
    if len(args) == 1:
        lower_bound = "0L"
        upper_arg = 0
    else:
        lower_bound = self.visit(args[0])
        upper_arg = 1
    upper_type = iter_type = "long "
    upper_value = self.visit(args[upper_arg])
    if is_simple_expr(args[upper_arg]):
        upper_bound = upper_value  # compatible with collapse
    else:
        upper_bound = "__target{0}".format(id(node))
    # If variable is local to the for body keep it local...
    if node.target.id in self.scope[node] and not hasattr(self, 'yields'):
        loop = []
    else:
        # For yield functions the upper bound is global.
        iter_type = ""
        # Back one step to keep Python behavior (except for break):
        loop = [If("{} == {}".format(local_iter, upper_bound),
                   Statement("{} -= {}".format(local_iter, step)))]
    comparison = self.handle_real_loop_comparison(args, local_iter,
                                                  upper_bound)
    forloop = For("{0} {1}={2}".format(iter_type, local_iter, lower_bound),
                  comparison,
                  "{0} += {1}".format(local_iter, step),
                  loop_body)
    loop.insert(0, self.process_omp_attachements(node, forloop))
    # Store upper bound value if needed:
    if upper_bound is upper_value:
        header = []
    else:
        assignment = self.make_assign(upper_type, upper_bound, upper_value)
        header = [Statement(assignment)]
    return header, loop
Create C For representation for Cxx generation .
439
9
231,640
def handle_omp_for(self, node, local_iter):
    """Fix OpenMP directives on For loops.

    NOTE(review): the flattened original does not show whether the
    'private' half is nested inside the parallel/task test; the upstream
    structure (loop level) is reproduced here - confirm against history.
    """
    for directive in metadata.get(node, OMPDirective):
        if any(key in directive.s for key in (' parallel ', ' task ')):
            # Eventually add local_iter in a shared clause as iterable is
            # shared in the for loop (for every clause with datasharing):
            directive.s += ' shared({})'
            shared_dep = ast.Name(local_iter, ast.Load(), None)
            directive.deps.append(shared_dep)
            directive.shared_deps.append(shared_dep)
        target = node.target
        assert isinstance(target, ast.Name)
        hasfor = 'for' in directive.s
        nodefault = 'default' not in directive.s
        noindexref = all(isinstance(x, ast.Name) and x.id != target.id
                         for x in directive.deps)
        if (hasfor and nodefault and noindexref and
                target.id not in self.scope[node]):
            # Target is private by default in omp, but iterator use may
            # introduce an extra variable:
            directive.s += ' private({})'
            private_dep = ast.Name(target.id, ast.Load(), None)
            directive.deps.append(private_dep)
            directive.private_deps.append(private_dep)
Fix OpenMP directives on For loops .
296
8
231,641
def can_use_autofor(self, node):
    """Check if the given For node can use the autoFor syntax.

    The loop target must be a plain name, local to the loop body, not an
    OpenMP dependency, and the node must carry no OpenMP directive.

    Fix: the original tested ``node.target.id not in self.openmp_deps``
    twice; the redundant second check is removed.
    """
    auto_for = (isinstance(node.target, ast.Name) and
                node.target.id in self.scope[node] and
                node.target.id not in self.openmp_deps)
    return auto_for and not metadata.get(node, OMPDirective)
Check if given for Node can use autoFor syntax .
98
11
231,642
def can_use_c_for(self, node):
    """Check if a for loop can use classic C syntax.

    The iterable must be a builtin (x)range call whose loop target is
    never reassigned in the body, with a step that is either omitted or
    a numeric literal.

    Fix: the original abused a list comprehension purely for its
    ``dict.update`` side effect; replaced with a plain loop.
    """
    assert isinstance(node.target, ast.Name)
    range_name = 'range' if sys.version_info.major == 3 else 'xrange'
    pattern_range = ast.Call(
        func=ast.Attribute(
            value=ast.Name(id='__builtin__', ctx=ast.Load(), annotation=None),
            attr=range_name, ctx=ast.Load()),
        args=AST_any(), keywords=[])
    is_assigned = {node.target.id: False}
    for stmt in node.body:
        is_assigned.update(self.gather(IsAssigned, stmt))
    nodes = ASTMatcher(pattern_range).search(node.iter)
    if node.iter not in nodes or is_assigned[node.target.id]:
        return False
    args = node.iter.args
    if len(args) < 3:
        return True
    return isinstance(args[2], ast.Num)
Check if a for loop can use classic C syntax .
246
11
231,643
def visit_For(self, node):
    """Create the For representation for C++ generation.

    Depending on the loop shape, emit either a classic C for loop, an
    AutoFor (range-based) loop, or a generic iterator-based loop.

    Raises
    ------
    PythranSyntaxError
        When the loop target is not a plain identifier.
    """
    if not isinstance(node.target, ast.Name):
        raise PythranSyntaxError(
            "Using something other than an identifier as loop target",
            node.target)
    target = self.visit(node.target)

    # Handle the body of the for loop
    loop_body = Block([self.visit(stmt) for stmt in node.body])

    # Declare local variables at the top of the loop body
    loop_body = self.process_locals(node, loop_body, node.target.id)
    iterable = self.visit(node.iter)

    if self.can_use_c_for(node):
        header, loop = self.gen_c_for(node, target, loop_body)
    else:
        if self.can_use_autofor(node):
            header = []
            self.ldecls.remove(node.target.id)
            autofor = AutoFor(target, iterable, loop_body)
            loop = [self.process_omp_attachements(node, autofor)]
        else:
            # Iterator declaration
            local_iter = "__iter{0}".format(id(node))
            local_iter_decl = self.types.builder.Assignable(
                self.types[node.iter])

            self.handle_omp_for(node, local_iter)

            # Assign iterable
            # For C loop, it avoids issues
            # if the upper bound is assigned in the loop
            asgnt = self.make_assign(local_iter_decl, local_iter, iterable)
            header = [Statement(asgnt)]
            loop = self.gen_for(node, target, local_iter, local_iter_decl,
                                loop_body)

    # For xxxComprehension, it is replaced by a for loop. In this case,
    # pre-allocate size of container.
    for comp in metadata.get(node, metadata.Comprehension):
        header.append(Statement("pythonic::utils::reserve({0},{1})".format(
            comp.target, iterable)))

    return Block(header + loop)
Create For representation for Cxx generation .
466
8
231,644
def visit_While(self, node):
    """Create a While node for C++ generation."""
    condition = self.visit(node.test)
    children = Block([self.visit(child) for child in node.body])
    return self.process_omp_attachements(node, While(condition, children))
Create While node for Cxx generation .
64
8
231,645
def visit_Break(self, _):
    """Emit `break` in most cases, or a goto when an orelse is pending."""
    handlers = self.break_handlers
    if not (handlers and handlers[-1]):
        return Statement("break")
    # Inside a loop carrying an orelse clause: jump past the orelse block.
    return Statement("goto {0}".format(handlers[-1]))
Generate break statement in most case and goto for orelse clause .
62
15
231,646
def visit_Module(self, node):
    """Build a compilation unit from the module body.

    Emits include headers (declaration headers first) and wraps all
    declarations and definitions in the pythran module namespace.
    """
    # build all types
    deps = sorted(self.dependencies)
    headers = [Include(os.path.join("pythonic", "include", *t) + ".hpp")
               for t in deps]
    headers += [Include(os.path.join("pythonic", *t) + ".hpp")
                for t in deps]

    decls_n_defns = [self.visit(stmt) for stmt in node.body]
    decls, defns = zip(*[s for s in decls_n_defns if s])

    nsbody = [s for ls in decls + defns for s in ls]
    ns = Namespace(pythran_ward + self.passmanager.module_name, nsbody)
    self.result = CompilationUnit(headers + [ns])
Build a compilation unit .
193
5
231,647
def refine(pm, node, optimizations):
    """Refine node in place until it matches pythran's expectations.

    Runs the sanitizing passes, a few early optimizations, then iterates
    the user-selected optimizations until a fixed point is reached.
    """
    # Sanitize input
    pm.apply(ExpandGlobals, node)
    pm.apply(ExpandImportAll, node)
    pm.apply(NormalizeTuples, node)
    pm.apply(ExpandBuiltins, node)
    pm.apply(ExpandImports, node)
    pm.apply(NormalizeMethodCalls, node)
    pm.apply(NormalizeIsNone, node)
    pm.apply(SplitStaticExpression, node)
    pm.apply(NormalizeStaticIf, node)
    pm.apply(NormalizeTuples, node)
    pm.apply(NormalizeException, node)
    pm.apply(NormalizeMethodCalls, node)

    # Some early optimizations
    pm.apply(ComprehensionPatterns, node)
    pm.apply(RemoveLambdas, node)
    pm.apply(RemoveNestedFunctions, node)
    pm.apply(NormalizeCompare, node)

    pm.gather(ExtendedSyntaxCheck, node)

    pm.apply(ListCompToGenexp, node)
    pm.apply(RemoveComprehension, node)
    pm.apply(RemoveNamedArguments, node)

    # sanitize input
    pm.apply(NormalizeReturn, node)
    pm.apply(UnshadowParameters, node)
    pm.apply(FalsePolymorphism, node)

    # some extra optimizations, iterated until no pass fires anymore
    apply_optimisation = True
    while apply_optimisation:
        apply_optimisation = False
        for optimization in optimizations:
            # pm.apply returns a (changed, node) pair
            apply_optimisation |= pm.apply(optimization, node)[0]
Refine node in place until it matches pythran's expectations.
321
14
231,648
def prepare(self, node):
    """Initialise globals effects as this analysis is inter-procedural.

    Registers a FunctionEffect node for every intrinsic of every module
    (recursing into submodules and class fields) plus the user's global
    declarations and the special UnboundValue.
    """
    super(GlobalEffects, self).prepare(node)

    def register_node(module):
        """ Recursively save globals effect for all functions. """
        for v in module.values():
            if isinstance(v, dict):  # Submodule case
                register_node(v)
            else:
                fe = GlobalEffects.FunctionEffect(v)
                self.node_to_functioneffect[v] = fe
                self.result.add_node(fe)
                if isinstance(v, intrinsic.Class):
                    register_node(v.fields)

    register_node(self.global_declarations)
    for module in MODULES.values():
        register_node(module)
    self.node_to_functioneffect[intrinsic.UnboundValue] = \
        GlobalEffects.FunctionEffect(intrinsic.UnboundValue)
Initialise globals effects as this analyse is inter - procedural .
179
13
231,649
def prepare(self, node):
    """Initialise values to prepare typing computation.

    Registers the pythonic functor type and the type combiner of every
    intrinsic, recursing into submodules and class fields.
    """
    def register(name, module):
        """ Recursively save function typing and combiners for Pythonic."""
        for fname, function in module.items():
            if isinstance(function, dict):
                register(name + "::" + fname, function)
            else:
                tname = 'pythonic::{0}::functor::{1}'.format(name, fname)
                self.result[function] = self.builder.NamedType(tname)
                self.combiners[function] = function
                if isinstance(function, Class):
                    register(name + "::" + fname, function.fields)

    for mname, module in MODULES.items():
        register(mname, module)
    super(Types, self).prepare(node)
Initialise values to prepare typing computation .
175
8
231,650
def register(self, ptype):
    """Register ptype as a local typedef.

    The number of typedefs is bounded because too many of them leads to
    a memory burst; returns whether the registration happened.
    """
    if len(self.typedefs) >= cfg.getint('typing', 'max_combiner'):
        return False
    self.typedefs.append(ptype)
    return True
register ptype as a local typedef
62
8
231,651
def isargument(self, node):
    """Check whether node aliases to a formal parameter."""
    try:
        node_id, _ = self.node_to_id(node)
    except UnboundableRValue:
        return False
    if node_id not in self.name_to_nodes:
        return False
    candidates = self.name_to_nodes[node_id]
    return any(isinstance(n, ast.Name) and isinstance(n.ctx, ast.Param)
               for n in candidates)
checks whether node aliases to a parameter .
95
8
231,652
def combine(self, node, othernode, op=None, unary_op=None, register=False,
            aliasing_type=False):
    """Change node typing with the combination of node and othernode.

    When `aliasing_type` is set, the combination is also propagated to
    every strict alias of `node`. Unknown types propagate as-is.
    """
    if self.result[othernode] is self.builder.UnknownType:
        # Unknown propagates: mark node unknown unless it is already typed.
        if node not in self.result:
            self.result[node] = self.builder.UnknownType
        return

    if aliasing_type:
        self.combine_(node, othernode, op or operator.add,
                      unary_op or (lambda x: x), register)
        for a in self.strict_aliases[node]:
            self.combine_(a, othernode, op or operator.add,
                          unary_op or (lambda x: x), register)
    else:
        self.combine_(node, othernode, op or operator.add,
                      unary_op or (lambda x: x), register)
Change node typing with combination of node and othernode .
182
11
231,653
def visit_Return(self, node):
    """Compute return type and merge it with the other return types.

    Generator functions are skipped: their returns carry no value.
    """
    self.generic_visit(node)
    if self.yield_points:
        # Merging happens through the yield statements instead.
        return
    assert node.value, "Values were added in each return statement."
    self.combine(self.current, node.value)
Compute return type and merges with others possible return type .
65
13
231,654
def visit_Yield(self, node):
    """Compute yield type and merge it with the other yield types."""
    self.generic_visit(node)
    self.combine(self.current, node.value)
Compute yield type and merges it with others yield type .
32
13
231,655
def visit_BoolOp(self, node):
    """Merge BoolOp operand types.

    A BoolOp may evaluate to any of its operands, so the node type is
    the combination of every operand type.
    """
    # Visit subnodes
    self.generic_visit(node)
    # Plain loop: the previous list comprehension was evaluated only for
    # its side effects.
    for value in node.values:
        self.combine(node, value)
Merge BoolOp operand type .
49
9
231,656
def visit_Num(self, node):
    """Set the type of a numeric literal.

    Immediate numbers are lifted to std::integral_constant so they can
    act as compile-time values.
    """
    ctype = pytype_to_ctype(type(node.n))
    if node in self.immediates:
        ctype = "std::integral_constant<%s, %s>" % (ctype, node.n)
    self.result[node] = self.builder.NamedType(ctype)
Set type for number .
79
5
231,657
def visit_Str(self, node):
    """Set the pythonic string type."""
    str_type = pytype_to_ctype(str)
    self.result[node] = self.builder.NamedType(str_type)
Set the pythonic string type .
35
7
231,658
def visit_Attribute(self, node):
    """Compute the type of an attribute node.

    Literal attributes get their concrete type; otherwise fall back to a
    decltype on the functor path.
    """
    obj, path = attr_to_path(node)
    if not obj.isliteral():
        # No concrete type available: let the C++ compiler deduce it.
        self.result[node] = self.builder.DeclType('::'.join(path) + '{}')
        return
    typename = pytype_to_ctype(obj.signature)
    self.result[node] = self.builder.NamedType(typename)
Compute typing for an attribute node .
105
8
231,659
def visit_Slice(self, node):
    """Set the slice type, using contiguity information when available."""
    self.generic_visit(node)
    step = node.step
    is_contiguous = step is None or (isinstance(step, ast.Num) and
                                     step.n == 1)
    if is_contiguous:
        ty = 'pythonic::types::contiguous_slice'
    else:
        ty = 'pythonic::types::slice'
    self.result[node] = self.builder.NamedType(ty)
Set slicing type using continuous information if provided .
100
9
231,660
def init_not_msvc(self):
    """Find the OpenMP library and load it through the ctypes interface.

    Raises
    ------
    ImportError
        When no libgomp shared library could be located.
    """
    # find_library() does not search automatically LD_LIBRARY_PATH
    paths = os.environ.get('LD_LIBRARY_PATH', '').split(':')

    # Ask the C++ compiler where it would pick libgomp from.
    for gomp in ('libgomp.so', 'libgomp.dylib'):
        if cxx is None:
            continue
        cmd = [cxx, '-print-file-name=' + gomp]
        # the subprocess can fail in various ways
        # in that case just give up that path
        try:
            path = os.path.dirname(check_output(cmd).strip())
            if path:
                paths.append(path)
        except OSError:
            pass

    # Try to load find libgomp shared library using loader search dirs
    libgomp_path = find_library("gomp")

    # Try to use custom paths if lookup failed
    for path in paths:
        if libgomp_path:
            break
        path = path.strip()
        if os.path.isdir(path):
            libgomp_path = find_library(os.path.join(str(path), "libgomp"))

    if not libgomp_path:
        raise ImportError("I can't find a shared library for libgomp,"
                          " you may need to install it or adjust the "
                          "LD_LIBRARY_PATH environment variable.")
    else:
        # Load the library (shouldn't fail with an absolute path right?)
        self.libomp = ctypes.CDLL(libgomp_path)
        self.version = 45
Find OpenMP library and try to load if using ctype interface .
342
14
231,661
def visit_FunctionDef(self, node):
    """Determine whether this function definition can be inlined.

    Only single-statement bodies (a call or a return) that do not refer
    to the function's own name are recorded as inlinable.
    """
    if (len(node.body) == 1 and
            isinstance(node.body[0], (ast.Call, ast.Return))):
        ids = self.gather(Identifiers, node.body[0])
        # FIXME : It mark "not inlinable" def foo(foo): return foo
        if node.name not in ids:
            self.result[node.name] = copy.deepcopy(node)
Determine this function definition can be inlined .
105
11
231,662
def pytype_to_deps_hpp(t):
    """python -> pythonic type hpp filename.

    Returns the set of pythonic type headers required to represent `t`,
    recursing into container element types.

    Raises
    ------
    NotImplementedError
        For unsupported python types.
    """
    if isinstance(t, List):
        return {'list.hpp'}.union(pytype_to_deps_hpp(t.__args__[0]))
    elif isinstance(t, Set):
        return {'set.hpp'}.union(pytype_to_deps_hpp(t.__args__[0]))
    elif isinstance(t, Dict):
        tkey, tvalue = t.__args__
        return {'dict.hpp'}.union(pytype_to_deps_hpp(tkey),
                                  pytype_to_deps_hpp(tvalue))
    elif isinstance(t, Tuple):
        return {'tuple.hpp'}.union(*[pytype_to_deps_hpp(elt)
                                     for elt in t.__args__])
    elif isinstance(t, NDArray):
        out = {'ndarray.hpp'}
        # it's a transpose!
        if t.__args__[1].start == -1:
            out.add('numpy_texpr.hpp')
        return out.union(pytype_to_deps_hpp(t.__args__[0]))
    elif isinstance(t, Pointer):
        return {'pointer.hpp'}.union(pytype_to_deps_hpp(t.__args__[0]))
    elif isinstance(t, Fun):
        return {'cfun.hpp'}.union(*[pytype_to_deps_hpp(a)
                                    for a in t.__args__])
    elif t in PYTYPE_TO_CTYPE_TABLE:
        return {'{}.hpp'.format(t.__name__)}
    else:
        raise NotImplementedError("{0}:{1}".format(type(t), t))
python - > pythonic type hpp filename .
452
10
231,663
def pytype_to_deps(t):
    """python -> full pythonic type header paths.

    Both the implementation and the include variants of each header are
    returned.
    """
    deps = set()
    for header in pytype_to_deps_hpp(t):
        deps.update((os.path.join('pythonic', 'types', header),
                     os.path.join('pythonic', 'include', 'types', header)))
    return deps
python - > pythonic type header full path .
93
10
231,664
def prepare(self, node):
    """Add a node for each global declaration in the result graph.

    Also adds the special NoDeps node used for dependency-free values.
    """
    super(TypeDependencies, self).prepare(node)
    for v in self.global_declarations.values():
        self.result.add_node(v)
    self.result.add_node(TypeDependencies.NoDeps)
Add nodes for each global declarations in the result graph .
63
11
231,665
def visit_any_conditionnal(self, node1, node2):
    """Merge the name dependencies of two conditional branches.

    Each branch is visited with a copy of the current naming; a branch
    that raises KeyError (use before assignment) is discarded, and the
    surviving namings are merged.
    """
    true_naming = false_naming = None

    try:
        tmp = self.naming.copy()
        for expr in node1:
            self.visit(expr)
        true_naming = self.naming
        self.naming = tmp
    except KeyError:
        pass

    try:
        tmp = self.naming.copy()
        for expr in node2:
            self.visit(expr)
        false_naming = self.naming
        self.naming = tmp
    except KeyError:
        pass

    if true_naming and not false_naming:
        self.naming = true_naming

    elif false_naming and not true_naming:
        self.naming = false_naming

    elif true_naming and false_naming:
        self.naming = false_naming
        # Merge the true-branch dependencies into the false-branch ones.
        for k, v in true_naming.items():
            if k not in self.naming:
                self.naming[k] = v
            else:
                for dep in v:
                    if dep not in self.naming[k]:
                        self.naming[k].append(dep)
Set and restore the in_cond variable before visiting subnode .
231
13
231,666
def visit_FunctionDef(self, node):
    """Initialize per-function state to add edges from calls.

    Nested functions must have been removed by earlier passes.
    """
    # Ensure there are no nested functions.
    assert self.current_function is None
    self.current_function = node
    self.naming = dict()
    self.in_cond = False  # True when we are in a if, while or for
    self.generic_visit(node)
    self.current_function = None
Initialize variable for the current function to add edges from calls .
76
13
231,667
def visit_Return(self, node):
    """Add an edge from every possible callee to the current function."""
    if not node.value:
        # Yielding functions can't return values: nothing to connect.
        return
    for dep_set in self.visit(node.value):
        if not dep_set:
            self.result.add_edge(TypeDependencies.NoDeps,
                                 self.current_function)
            continue
        for dep in dep_set:
            self.result.add_edge(dep, self.current_function)
Add edge from all possible callee to current function .
92
11
231,668
def visit_Assign(self, node):
    """Propagate r-value type dependencies to each assigned name."""
    value_deps = self.visit(node.value)
    for target in node.targets:
        name = get_variable(target)
        if not isinstance(name, ast.Name):
            continue
        self.naming[name.id] = value_deps
In case of assignment assign value depend on r - value type dependencies .
63
14
231,669
def visit_AugAssign(self, node):
    """AugAssigned value depends on the r-value type dependencies.

    The previous dependencies of the target are merged with those of the
    assigned value (pairwise union of the dependency sets).
    """
    args = (self.naming[get_variable(node.target).id],
            self.visit(node.value))
    merge_dep = list({frozenset.union(*x)
                      for x in itertools.product(*args)})
    self.naming[get_variable(node.target).id] = merge_dep
AugAssigned value depends on r-value type dependencies.
89
12
231,670
def visit_For(self, node):
    """Handle the iterator variable in for loops.

    A target already known in `naming` is modelled as a conditional
    re-assignment; a fresh target directly takes the iterable's
    dependencies.
    """
    body = node.body
    if node.target.id in self.naming:
        # Re-binding an existing name: treat the loop header as an
        # assignment that may or may not happen.
        body = [ast.Assign(targets=[node.target], value=node.iter)] + body
        self.visit_any_conditionnal(body, node.orelse)
    else:
        iter_dep = self.visit(node.iter)
        self.naming[node.target.id] = iter_dep
        self.visit_any_conditionnal(body, body + node.orelse)
Handle iterator variable in for loops .
117
7
231,671
def visit_BoolOp(self, node):
    """Return type may come from any BoolOp operand.

    The dependency lists of all operands are concatenated.
    """
    # Flatten with a comprehension: sum(..., []) is quadratic in the
    # number of operands.
    return [dep for value in node.values for dep in self.visit(value)]
Return type may come from any boolop operand .
33
11
231,672
def visit_BinOp(self, node):
    """Return type depends on both operands of the binary operation."""
    left_deps = self.visit(node.left)
    right_deps = self.visit(node.right)
    combined = {frozenset.union(*pair)
                for pair in itertools.product(left_deps, right_deps)}
    return list(combined)
Return type depend from both operand of the binary operation .
61
12
231,673
def visit_Call(self, node):
    """A call depends on every function reachable from the call site."""
    arg_deps = [self.visit(arg) for arg in node.args]
    func_deps = self.visit(node.func) or []
    all_deps = arg_deps + [func_deps]
    return list({frozenset.union(*combination)
                 for combination in itertools.product(*all_deps)})
Function call depend on all function use in the call .
73
11
231,674
def visit_Name(self, node):
    """Return the type dependencies of the given variable."""
    if node.id in self.naming:
        return self.naming[node.id]
    if node.id in self.global_declarations:
        return [frozenset([self.global_declarations[node.id]])]
    if isinstance(node.ctx, ast.Param):
        # Parameters start with no dependency; record them for reuse.
        deps = [frozenset()]
        self.naming[node.id] = deps
        return deps
    raise PythranInternalError("Variable '{}' use before assignment"
                               "".format(node.id))
Return dependencies for given variable .
128
6
231,675
def visit_List(self, node):
    """A list depends on the type dependencies of each of its elements."""
    if not node.elts:
        return [frozenset()]
    deps = set()
    for elt in node.elts:
        deps.update(self.visit(elt))
    return list(deps)
List construction depend on each elements type dependency .
56
9
231,676
def visit_ExceptHandler(self, node):
    """An exception handler may bind a new variable before its body."""
    if node.name:
        # The bound exception starts with no type dependency.
        self.naming[node.name.id] = [frozenset()]
    for child in node.body:
        self.visit(child)
Exception may declare a new variable .
49
7
231,677
def arc_distance(theta_1, phi_1, theta_2, phi_2):
    """Calculate the pairwise arc distance between points on a sphere.

    Uses the haversine formulation, which stays numerically stable for
    small angular separations.
    """
    half_dtheta = (theta_2 - theta_1) / 2
    half_dphi = (phi_2 - phi_1) / 2
    hav = (np.sin(half_dtheta) ** 2 +
           np.cos(theta_1) * np.cos(theta_2) * np.sin(half_dphi) ** 2)
    return 2 * np.arctan2(np.sqrt(hav), np.sqrt(1 - hav))
Calculates the pairwise arc distance between all points in vector a and b .
128
17
231,678
def visit_Module(self, node):
    """Turn globals assignments into function defs and visit functions.

    Top-level assignments of constants become zero-argument functions
    with a static return; aliases between top-level symbols are kept as
    plain assignments.
    """
    module_body = list()
    symbols = set()

    # Gather top level assigned variables.
    for stmt in node.body:
        if isinstance(stmt, (ast.Import, ast.ImportFrom)):
            for alias in stmt.names:
                name = alias.asname or alias.name
                symbols.add(name)  # no warning here
        elif isinstance(stmt, ast.FunctionDef):
            if stmt.name in symbols:
                raise PythranSyntaxError(
                    "Multiple top-level definition of %s." % stmt.name,
                    stmt)
            else:
                symbols.add(stmt.name)

        if not isinstance(stmt, ast.Assign):
            continue

        for target in stmt.targets:
            if not isinstance(target, ast.Name):
                raise PythranSyntaxError(
                    "Top-level assignment to an expression.",
                    target)
            if target.id in self.to_expand:
                raise PythranSyntaxError(
                    "Multiple top-level definition of %s." % target.id,
                    target)
            if isinstance(stmt.value, ast.Name):
                if stmt.value.id in symbols:
                    continue  # create aliasing between top level symbols
            self.to_expand.add(target.id)

    for stmt in node.body:
        if isinstance(stmt, ast.Assign):
            # that's not a global var, but a module/function aliasing
            if all(isinstance(t, ast.Name) and t.id not in self.to_expand
                   for t in stmt.targets):
                module_body.append(stmt)
                continue

            self.local_decl = set()
            cst_value = self.visit(stmt.value)
            for target in stmt.targets:
                assert isinstance(target, ast.Name)
                # Replace the assignment by a zero-argument function
                # returning the (visited) constant value.
                module_body.append(
                    ast.FunctionDef(target.id,
                                    ast.arguments([], None, [], [],
                                                  None, []),
                                    [ast.Return(value=cst_value)],
                                    [], None))
                metadata.add(module_body[-1].body[0],
                             metadata.StaticReturn())
        else:
            self.local_decl = self.gather(
                LocalNameDeclarations, stmt)
            module_body.append(self.visit(stmt))

    self.update |= bool(self.to_expand)

    node.body = module_body
    return node
Turn globals assignment to functionDef and visit function defs .
537
13
231,679
def visit_Name(self, node):
    """Rewrite a read of an expanded global into a function call."""
    is_read = isinstance(node.ctx, ast.Load)
    if (is_read and node.id in self.to_expand and
            node.id not in self.local_decl):
        self.update = True
        # The global was turned into a function: call it.
        return ast.Call(func=node, args=[], keywords=[])
    return node
Turn global variable used not shadows to function call .
74
10
231,680
def visit_Module(self, node):
    """Add the imports required by the method-to-function transformation.

    Functions are visited in a second pass so that the imports gathered
    while rewriting method calls can be prepended to the module body.
    """
    self.skip_functions = True
    self.generic_visit(node)
    self.skip_functions = False
    self.generic_visit(node)
    new_imports = self.to_import - self.globals
    # NOTE(review): mod[17:] appears to strip a mangling prefix from the
    # module name; confirm against the mangling scheme used elsewhere.
    imports = [ast.Import(names=[ast.alias(name=mod[17:], asname=mod)])
               for mod in new_imports]
    node.body = imports + node.body
    self.update |= bool(imports)
    return node
When we normalize call we need to add correct import for method to function transformation .
116
17
231,681
def renamer(v, cur_module):
    """Rename a function path component to fit pythonic naming.

    The underscore-suffixed variant takes precedence over the plain name
    when it exists in the target module.
    """
    mname = demangle(v)
    suffixed = v + '_'
    if suffixed in cur_module:
        return suffixed, mname
    return v, mname
Rename function path to fit Pythonic naming .
45
10
231,682
def visit_Call(self, node):
    """Transform the call site into a normal function call.

    Method-style calls get their receiver pushed as first argument and
    the proper pythonic module prefix; function paths are then renamed
    to fit pythonic naming.
    """
    node = self.generic_visit(node)
    # Only attributes function can be Pythonic and should be normalized
    if isinstance(node.func, ast.Attribute):
        if node.func.attr in methods:
            # Get object targeted by methods
            obj = lhs = node.func.value
            # Get the most left identifier to check if it is not an
            # imported module
            while isinstance(obj, ast.Attribute):
                obj = obj.value
            is_not_module = (not isinstance(obj, ast.Name) or
                             obj.id not in self.imports)

            if is_not_module:
                self.update = True
                # As it was a methods call, push targeted object as first
                # arguments and add correct module prefix
                node.args.insert(0, lhs)
                mod = methods[node.func.attr][0]
                # Submodules import full module
                self.to_import.add(mangle(mod[0]))
                node.func = reduce(
                    lambda v, o: ast.Attribute(v, o, ast.Load()),
                    mod[1:] + (node.func.attr,),
                    ast.Name(mangle(mod[0]), ast.Load(), None))
            # else methods have been called using function syntax
        if node.func.attr in methods or node.func.attr in functions:
            # Now, methods and function have both function syntax
            def rec(path, cur_module):
                """
                Recursively rename path content looking in matching module.

                Prefers __module__ to module if it exists.
                This recursion is done as modules are visited top->bottom
                while attributes have to be visited bottom->top.
                """
                err = "Function path is chained attributes and name"
                assert isinstance(path, (ast.Name, ast.Attribute)), err
                if isinstance(path, ast.Attribute):
                    new_node, cur_module = rec(path.value, cur_module)
                    new_id, mname = self.renamer(path.attr, cur_module)
                    return (ast.Attribute(new_node, new_id, ast.Load()),
                            cur_module[mname])
                else:
                    new_id, mname = self.renamer(path.id, cur_module)
                    if mname not in cur_module:
                        raise PythranSyntaxError(
                            "Unbound identifier '{}'".format(mname), node)
                    return (ast.Name(new_id, ast.Load(), None),
                            cur_module[mname])

            # Rename module path to avoid naming issue.
            node.func.value, _ = rec(node.func.value, MODULES)
            self.update = True

    return node
Transform call site to have normal function call .
602
9
231,683
def _extract_specs_dependencies(specs):
    """Extract type dependencies for every exported signature."""
    deps = set()
    # Flatten function signatures and capsule signatures into one list.
    all_signatures = list(specs.capsules.values())
    for signatures in specs.functions.values():
        all_signatures.extend(signatures)
    for signature in all_signatures:
        for ty in signature:
            deps.update(pytype_to_deps(ty))
    # Keep "include" headers first
    return sorted(deps, key=lambda x: "include" not in x)
Extract types dependencies from specs for each exported signature .
133
11
231,684
def _parse_optimization ( optimization ) : splitted = optimization . split ( '.' ) if len ( splitted ) == 1 : splitted = [ 'pythran' , 'optimizations' ] + splitted return reduce ( getattr , splitted [ 1 : ] , __import__ ( splitted [ 0 ] ) )
Turns an optimization of the form my_optim my_package . my_optim into the associated symbol
71
21
231,685
def _write_temp ( content , suffix ) : with NamedTemporaryFile ( mode = 'w' , suffix = suffix , delete = False ) as out : out . write ( content ) return out . name
write content to a temporary XXX suffix file and return the filename . It is user s responsibility to delete when done .
44
23
231,686
def front_middle_end ( module_name , code , optimizations = None , module_dir = None ) : pm = PassManager ( module_name , module_dir ) # front end ir , renamings , docstrings = frontend . parse ( pm , code ) # middle-end if optimizations is None : optimizations = cfg . get ( 'pythran' , 'optimizations' ) . split ( ) optimizations = [ _parse_optimization ( opt ) for opt in optimizations ] refine ( pm , ir , optimizations ) return pm , ir , renamings , docstrings
Front - end and middle - end compilation steps
124
9
231,687
def generate_py(module_name, code, optimizations=None, module_dir=None):
    """python + pythran spec -> py code.

    Runs the front and middle end, then dumps the refined IR back to
    python source.
    """
    pm, ir, _, _ = front_middle_end(module_name, code, optimizations,
                                    module_dir)
    return pm.dump(Python, ir)
python + pythran spec - > py code
57
10
231,688
def compile_cxxfile(module_name, cxxfile, output_binary=None, **kwargs):
    """c++ file -> native module.

    Return the filename of the produced shared library.

    Raises
    ------
    CompileError
        On failure of the underlying build.
    """
    builddir = mkdtemp()
    buildtmp = mkdtemp()

    extension_args = make_extension(python=True, **kwargs)

    extension = PythranExtension(module_name, [cxxfile], **extension_args)

    try:
        setup(name=module_name,
              ext_modules=[extension],
              cmdclass={"build_ext": PythranBuildExt},
              # fake CLI call
              script_name='setup.py',
              script_args=['--verbose'
                           if logger.isEnabledFor(logging.INFO)
                           else '--quiet',
                           'build_ext',
                           '--build-lib', builddir,
                           '--build-temp', buildtmp])
    except SystemExit as e:
        raise CompileError(str(e))

    def copy(src_file, dest_file):
        # not using shutil.copy because it fails to copy stat across devices
        with open(src_file, 'rb') as src:
            with open(dest_file, 'wb') as dest:
                dest.write(src.read())

    ext = sysconfig.get_config_var('SO')
    # Copy all generated files including the module name prefix (.pdb, ...)
    for f in glob.glob(os.path.join(builddir, module_name + "*")):
        if f.endswith(ext):
            if not output_binary:
                output_binary = os.path.join(os.getcwd(), module_name + ext)
            copy(f, output_binary)
        else:
            if not output_binary:
                output_directory = os.getcwd()
            else:
                output_directory = os.path.dirname(output_binary)
            copy(f, os.path.join(output_directory, os.path.basename(f)))
    shutil.rmtree(builddir)
    shutil.rmtree(buildtmp)

    logger.info("Generated module: " + module_name)
    logger.info("Output: " + output_binary)

    return output_binary
c ++ file - > native module Return the filename of the produced shared library Raises CompileError on failure
486
22
231,689
def compile_pythranfile(file_path, output_file=None, module_name=None,
                        cpponly=False, pyonly=False, **kwargs):
    """Pythran file -> c++ file -> native module.

    The module name is derived from the output or input file name, and an
    optional `.pythran` spec file lying next to the input is loaded.
    """
    if not output_file:
        # derive module name from input file name
        _, basename = os.path.split(file_path)
        module_name = module_name or os.path.splitext(basename)[0]

    else:
        # derive module name from destination output_file name
        _, basename = os.path.split(output_file)
        module_name = module_name or basename.split(".", 1)[0]

    module_dir = os.path.dirname(file_path)

    # Look for an extra spec file
    spec_file = os.path.splitext(file_path)[0] + '.pythran'
    if os.path.isfile(spec_file):
        specs = load_specfile(open(spec_file).read())
        kwargs.setdefault('specs', specs)

    output_file = compile_pythrancode(module_name, open(file_path).read(),
                                      output_file=output_file,
                                      cpponly=cpponly, pyonly=pyonly,
                                      module_dir=module_dir,
                                      **kwargs)

    return output_file
Pythran file - > c ++ file - > native module .
300
14
231,690
def nest_reducer(x, g):
    """Create an ast.For node from a comprehension and another node.

    Comprehension `if` clauses become nested ast.If wrappers around the
    loop body, the first clause ending up innermost.

    Examples
    --------
    >> [i for i in xrange(2) if i < 3 if 0 < i]

    Becomes

    >> for i in xrange(2):
    >>     if i < 3:
    >>         if 0 < i:
    >>             ... the code from `x` ...
    """
    wrapped = x
    for if_clause in g.ifs:
        wrapped = ast.If(if_clause, [wrapped], [])
    return ast.For(g.target, g.iter, [wrapped], [])
Create a ast . For node from a comprehension and another node .
160
13
231,691
def inline(self):
    """Return the declarator as a single line."""
    tp_lines, tp_decl = self.get_decl_pair()
    joined = " ".join(tp_lines)
    if tp_decl is None:
        return joined
    return "%s %s" % (joined, tp_decl)
Return the declarator as a single line .
73
10
231,692
def get_decl_pair(self):
    """See Declarator.get_decl_pair."""
    def get_tp():
        """Yield the lines of the struct definition."""
        header = "struct "
        if self.tpname is not None:
            header += self.tpname
        if self.inherit is not None:
            header += " : " + self.inherit
        yield header
        yield "{"
        for field in self.fields:
            for line in field.generate():
                yield "  " + line
        yield "} "
    return get_tp(), ""
See Declarator . get_decl_pair .
108
11
231,693
def make_control_flow_handlers(self, cont_n, status_n, expected_return,
                               has_cont, has_break):
    """Create the statements gathering control flow information.

    Builds the if cascade that, after a static_if call, unpacks the
    expected return values and re-executes the control flow instruction
    (continue/break) encoded in the status variable.
    """
    if expected_return:
        assign = cont_ass = [ast.Assign(
            [ast.Tuple(expected_return, ast.Store())],
            ast.Name(cont_n, ast.Load(), None))]
    else:
        assign = cont_ass = []

    if has_cont:
        cmpr = ast.Compare(ast.Name(status_n, ast.Load(), None),
                           [ast.Eq()], [ast.Num(LOOP_CONT)])
        cont_ass = [ast.If(cmpr,
                           deepcopy(assign) + [ast.Continue()],
                           cont_ass)]
    if has_break:
        cmpr = ast.Compare(ast.Name(status_n, ast.Load(), None),
                           [ast.Eq()], [ast.Num(LOOP_BREAK)])
        cont_ass = [ast.If(cmpr,
                           deepcopy(assign) + [ast.Break()],
                           cont_ass)]
    return cont_ass
Create the statements in charge of gathering control flow information for the static_if result and executes the expected control flow instruction
263
23
231,694
def visit(self, node):
    """Replace the node if it is a placeholder, else keep visiting."""
    if not isinstance(node, Placeholder):
        return super(PlaceholderReplace, self).visit(node)
    return self.placeholders[node.id]
Replace the placeholder if it is one or continue .
45
11
231,695
def visit(self, node):
    """Replace node if it matches a known pattern, else keep going."""
    for pattern, replace in know_pattern:
        checker = Check(node, dict())
        if not checker.visit(pattern):
            continue
        node = PlaceholderReplace(checker.placeholders).visit(replace())
        self.update = True
    return super(PatternTransform, self).visit(node)
Try to replace if node match the given pattern or keep going .
72
13
231,696
def visit_Name(self, node):
    """Any node with a Store or Param context is a new identifier."""
    # NOTE(review): ast.Param is a legacy context class; confirm it still
    # exists on the targeted Python versions.
    if isinstance(node.ctx, (ast.Store, ast.Param)):
        self.result.add(node.id)
Any node with Store or Param context is a new identifier .
40
12
231,697
def visit_FunctionDef(self, node):
    """A function name is itself a possible identifier."""
    self.result.add(node.name)
    self.generic_visit(node)
Function name is a possible identifier .
30
7
231,698
def visit_If(self, node):
    """OUT = true branch U false branch; RAISES likewise.

    When the test is a statically-true predicate, only the true branch
    contributes to the outgoing edges.
    """
    currs = (node,)
    raises = ()

    # true branch
    for n in node.body:
        self.result.add_node(n)
        for curr in currs:
            self.result.add_edge(curr, n)
        currs, nraises = self.visit(n)
        raises += nraises

    if is_true_predicate(node.test):
        return currs, raises

    # false branch
    tcurrs = currs
    currs = (node,)
    for n in node.orelse:
        self.result.add_node(n)
        for curr in currs:
            self.result.add_edge(curr, n)
        currs, nraises = self.visit(n)
        raises += nraises

    return tcurrs + currs, raises
OUT = true branch U false branch RAISES = true branch U false branch
179
16
231,699
def visit_Try(self, node):
    """OUT = body's U handlers'; RAISES = handlers'.

    Raise statements in the body flow into every handler; this is
    conservative, as precise routing would need type information.
    """
    currs = (node,)
    raises = ()
    for handler in node.handlers:
        self.result.add_node(handler)

    for n in node.body:
        self.result.add_node(n)
        for curr in currs:
            self.result.add_edge(curr, n)
        currs, nraises = self.visit(n)
        for nraise in nraises:
            if isinstance(nraise, ast.Raise):
                for handler in node.handlers:
                    self.result.add_edge(nraise, handler)
            else:
                raises += (nraise,)

    for handler in node.handlers:
        ncurrs, nraises = self.visit(handler)
        currs += ncurrs
        raises += nraises

    return currs, raises
OUT = body s U handler s RAISES = handler s this equation is not has good has it could be ... but we need type information to be more accurate
174
33