idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
22,100
def __ensure_suffix_stem(t, suffix):
    """Ensure that the target ``t`` carries the given suffix.

    Returns a ``(target, stem)`` pair: when the suffix was already present
    the original target object is handed back unchanged, otherwise the
    suffixed path string is returned.
    """
    tpath = str(t)
    if tpath.endswith(suffix):
        # Suffix already present: keep the original target object.
        stem, ext = os.path.splitext(tpath)
        return t, stem

    # Append the suffix; the unsuffixed path is the stem.
    stem = tpath
    return tpath + suffix, stem
Ensure that the target t has the given suffix and return the file's stem.
22,101
def __create_output_dir(base_dir):
    """Ensure that the output directory ``base_dir`` exists, creating it if needed."""
    root, tail = os.path.split(base_dir)

    # A trailing '/' means base_dir itself names the directory; otherwise only
    # the parent part (root) of a path with a tail component is a directory.
    if tail:
        out_dir = base_dir if base_dir.endswith('/') else root
    else:
        out_dir = base_dir if base_dir.endswith('/') else None

    if out_dir and not os.path.isdir(out_dir):
        os.makedirs(out_dir)
Ensure that the output directory base_dir exists .
22,102
def __detect_cl_tool(env, chainkey, cdict, cpriority=None):
    """Pick a command line tool from the list and initialize its env variables.

    Scans ``cpriority`` (or all keys of ``cdict``) for the first executable
    found on the PATH, storing its location in ``env[chainkey]`` and its
    default command line in ``env[chainkey + 'COM']``.
    """
    # Respect a value the user has already set; only auto-detect when empty.
    if env.get(chainkey, '') == '':
        clpath = ''
        if cpriority is None:
            cpriority = cdict.keys()
        for cltool in cpriority:
            # __debug_tool_location is a module-level debug flag.
            if __debug_tool_location:
                print("DocBook: Looking for %s" % cltool)
            clpath = env.WhereIs(cltool)
            if clpath:
                if __debug_tool_location:
                    print("DocBook: Found:%s" % cltool)
                env[chainkey] = clpath
                # Only install the default command line if none is set yet.
                if not env[chainkey + 'COM']:
                    env[chainkey + 'COM'] = cdict[cltool]
                break
Helper function picks a command line tool from the list and initializes its environment variables .
22,103
def _detect(env):
    """Detect the command line tools that we might need for creating
    the requested output formats.
    """
    global prefer_xsltproc

    if env.get('DOCBOOK_PREFER_XSLTPROC', ''):
        prefer_xsltproc = True

    # Only look for the external tools when neither libxml2 nor lxml is
    # importable, or when the user explicitly prefers xsltproc.
    if ((not has_libxml2 and not has_lxml) or (prefer_xsltproc)):
        __detect_cl_tool(env, 'DOCBOOK_XSLTPROC', xsltproc_com, xsltproc_com_priority)
        __detect_cl_tool(env, 'DOCBOOK_XMLLINT', xmllint_com)
        __detect_cl_tool(env, 'DOCBOOK_FOP', fop_com, ['fop', 'xep', 'jw'])
Detect all the command line tools that we might need for creating the requested output formats .
22,104
def __xml_scan(node, env, path, arg):
    """Simple XML file scanner, detecting local images and XIncludes as
    implicit dependencies.

    Tries, in order: plain entity regex scan, lxml XSLT, external xsltproc,
    a plain include regex scan, and finally libxml2/libxslt.
    """
    # Does the node exist yet?
    if not os.path.isfile(str(node)):
        return []

    if env.get('DOCBOOK_SCANENT', ''):
        # Simple pattern matching for system entities only.
        contents = node.get_text_contents()
        return sentity_re.findall(contents)

    xsl_file = os.path.join(scriptpath, 'utils', 'xmldepend.xsl')
    if not has_libxml2 or prefer_xsltproc:
        if has_lxml and not prefer_xsltproc:
            # Use lxml to apply the dependency-extraction stylesheet.
            from lxml import etree

            xsl_tree = etree.parse(xsl_file)
            doc = etree.parse(str(node))
            result = doc.xslt(xsl_tree)

            depfiles = [x.strip() for x in str(result).splitlines()
                        if x.strip() != "" and not x.startswith("<?xml ")]
            return depfiles
        else:
            # Try to call an external xsltproc binary.
            xsltproc = env.subst("$DOCBOOK_XSLTPROC")
            if xsltproc and xsltproc.endswith('xsltproc'):
                result = env.backtick(' '.join([xsltproc, xsl_file, str(node)]))
                depfiles = [x.strip() for x in str(result).splitlines()
                            if x.strip() != "" and not x.startswith("<?xml ")]
                return depfiles
            else:
                # Last resort: simple pattern matching for includes/images.
                contents = node.get_text_contents()
                return include_re.findall(contents)

    # libxml2/libxslt path: apply the stylesheet and collect the file list.
    styledoc = libxml2.parseFile(xsl_file)
    style = libxslt.parseStylesheetDoc(styledoc)
    doc = libxml2.readFile(str(node), None, libxml2.XML_PARSE_NOENT)
    result = style.applyStylesheet(doc, None)

    depfiles = []
    for x in str(result).splitlines():
        if x.strip() != "" and not x.startswith("<?xml "):
            depfiles.extend(x.strip().split())

    # Free the C-level documents explicitly (libxml2 bindings are not GC'd).
    style.freeStylesheet()
    doc.freeDoc()
    result.freeDoc()

    return depfiles
Simple XML file scanner detecting local images and XIncludes as implicit dependencies .
22,105
def __xinclude_libxml2(target, source, env):
    """Resolve XIncludes of the single source file using the libxml2 module,
    writing the expanded document to the single target file.
    """
    src_path = str(source[0])
    dst_path = str(target[0])

    parsed = libxml2.readFile(src_path, None, libxml2.XML_PARSE_NOENT)
    parsed.xincludeProcessFlags(libxml2.XML_PARSE_NOENT)
    parsed.saveFile(dst_path)
    # Free the C-level document explicitly.
    parsed.freeDoc()
    return None
Resolving XIncludes using the libxml2 module .
22,106
def __xinclude_lxml(target, source, env):
    """Resolve XIncludes of the single source file using the lxml module,
    writing the expanded document to the single target file.

    The write step is best-effort: serialization errors are ignored.
    """
    from lxml import etree

    doc = etree.parse(str(source[0]))
    doc.xinclude()
    try:
        doc.write(str(target[0]), xml_declaration=True,
                  encoding="UTF-8", pretty_print=True)
    except Exception:
        # Was a bare `except:`; keep the best-effort behavior but stop
        # swallowing KeyboardInterrupt/SystemExit.
        pass
    return None
Resolving XIncludes using the lxml module .
22,107
def DocbookHtml(env, target, source=None, *args, **kw):
    """A pseudo-Builder providing a Docbook toolchain for HTML output."""
    target, source = __extend_targets_sources(target, source)
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTML', ['html', 'docbook.xsl'])

    # Pick the best available transformation backend.
    builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)

    outputs = []
    for tgt, src in zip(target, source):
        built = builder.__call__(env, __ensure_suffix(tgt, '.html'), src, **kw)
        # Rebuild when the stylesheet changes.
        env.Depends(built, kw['DOCBOOK_XSL'])
        outputs.extend(built)
    return outputs
A pseudo - Builder providing a Docbook toolchain for HTML output .
22,108
def DocbookMan(env, target, source=None, *args, **kw):
    """A pseudo-Builder providing a Docbook toolchain for Man page output.

    The output filenames are derived from the refentry metadata of the
    source document when possible (DOM first, regex fallback).
    """
    target, source = __extend_targets_sources(target, source)
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_MAN', ['manpages', 'docbook.xsl'])
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)

    result = []
    for t, s in zip(target, source):
        volnum = "1"
        outfiles = []
        srcfile = __ensure_suffix(str(s), '.xml')
        if os.path.isfile(srcfile):
            try:
                import xml.dom.minidom

                dom = xml.dom.minidom.parse(__ensure_suffix(str(s), '.xml'))
                # Extract the volume number (default "1") ...
                for node in dom.getElementsByTagName('refmeta'):
                    for vol in node.getElementsByTagName('manvolnum'):
                        volnum = __get_xml_text(vol)
                # ... and one output filename per refname.
                for node in dom.getElementsByTagName('refnamediv'):
                    for ref in node.getElementsByTagName('refname'):
                        outfiles.append(__get_xml_text(ref) + '.' + volnum)
            except:
                # DOM parsing failed; fall back to simple regex parsing.
                f = open(__ensure_suffix(str(s), '.xml'), 'r')
                content = f.read()
                f.close()
                for m in re_manvolnum.finditer(content):
                    volnum = m.group(1)
                for m in re_refname.finditer(content):
                    outfiles.append(m.group(1) + '.' + volnum)

            if not outfiles:
                # No refname found: use the stem of the source file.
                spath = str(s)
                if not spath.endswith('.xml'):
                    outfiles.append(spath + '.' + volnum)
                else:
                    stem, ext = os.path.splitext(spath)
                    outfiles.append(stem + '.' + volnum)
        else:
            # Source not on disk: rely completely on the given target name.
            outfiles.append(t)

        # Only the first output is the build target; the rest are cleaned up.
        __builder.__call__(env, outfiles[0], s, **kw)
        env.Depends(outfiles[0], kw['DOCBOOK_XSL'])
        result.append(outfiles[0])
        if len(outfiles) > 1:
            env.Clean(outfiles[0], outfiles[1:])

    return result
A pseudo - Builder providing a Docbook toolchain for Man page output .
22,109
def DocbookSlidesPdf(env, target, source=None, *args, **kw):
    """A pseudo-Builder providing a Docbook toolchain for PDF slides output."""
    target, source = __extend_targets_sources(target, source)
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESPDF', ['slides', 'fo', 'plain.xsl'])
    builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)

    outputs = []
    for tgt, src in zip(target, source):
        tgt, stem = __ensure_suffix_stem(tgt, '.pdf')
        # First transform to XSL-FO, then render the FO file to PDF.
        fo_nodes = builder.__call__(env, stem + '.fo', src, **kw)
        env.Depends(fo_nodes, kw['DOCBOOK_XSL'])
        outputs.extend(fo_nodes)
        outputs.extend(__fop_builder.__call__(env, tgt, fo_nodes, **kw))
    return outputs
A pseudo - Builder providing a Docbook toolchain for PDF slides output .
22,110
def DocbookSlidesHtml(env, target, source=None, *args, **kw):
    """A pseudo-Builder providing a Docbook toolchain for HTML slides output."""
    # Normalize targets/sources; with no source the target is the source and
    # the default output becomes index.html.
    if not SCons.Util.is_List(target):
        target = [target]
    if not source:
        source = target
        target = ['index.html']
    elif not SCons.Util.is_List(source):
        source = [source]

    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESHTML', ['slides', 'html', 'plain.xsl'])
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)

    # Create the output directory if a base_dir keyword was given.
    base_dir = kw.get('base_dir', '')
    if base_dir:
        __create_output_dir(base_dir)

    result = []
    r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
    env.Depends(r, kw['DOCBOOK_XSL'])
    result.extend(r)
    # Generated toc/foil files are side products; register them for cleanup.
    env.Clean(r, [os.path.join(base_dir, 'toc.html')] +
              glob.glob(os.path.join(base_dir, 'foil*.html')))

    return result
A pseudo - Builder providing a Docbook toolchain for HTML slides output .
22,111
def DocbookXInclude(env, target, source, *args, **kw):
    """A pseudo-Builder for resolving XIncludes in a separate processing step."""
    target, source = __extend_targets_sources(target, source)

    # Pick the best available XInclude backend.
    builder = __select_builder(__xinclude_lxml_builder, __xinclude_libxml2_builder, __xmllint_builder)

    outputs = []
    for tgt, src in zip(target, source):
        outputs.extend(builder.__call__(env, tgt, src, **kw))
    return outputs
A pseudo - Builder for resolving XIncludes in a separate processing step .
22,112
def DocbookXslt(env, target, source=None, *args, **kw):
    """A pseudo-Builder applying a simple XSL transformation to the input file."""
    target, source = __extend_targets_sources(target, source)
    # The stylesheet comes from the 'xsl' keyword, defaulting to transform.xsl.
    kw['DOCBOOK_XSL'] = kw.get('xsl', 'transform.xsl')

    builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)

    outputs = []
    for tgt, src in zip(target, source):
        built = builder.__call__(env, tgt, src, **kw)
        env.Depends(built, kw['DOCBOOK_XSL'])
        outputs.extend(built)
    return outputs
A pseudo - Builder applying a simple XSL transformation to the input file .
22,113
def generate(env):
    """Add Builders and construction variables for docbook to an Environment."""
    env.SetDefault(
        # Default names for user-customized XSL stylesheets (empty = use the
        # shipped defaults).
        DOCBOOK_DEFAULT_XSL_EPUB='',
        DOCBOOK_DEFAULT_XSL_HTML='',
        DOCBOOK_DEFAULT_XSL_HTMLCHUNKED='',
        DOCBOOK_DEFAULT_XSL_HTMLHELP='',
        DOCBOOK_DEFAULT_XSL_PDF='',
        DOCBOOK_DEFAULT_XSL_MAN='',
        DOCBOOK_DEFAULT_XSL_SLIDESPDF='',
        DOCBOOK_DEFAULT_XSL_SLIDESHTML='',

        # Paths to the detected executables (filled in by _detect below).
        DOCBOOK_XSLTPROC='',
        DOCBOOK_XMLLINT='',
        DOCBOOK_FOP='',

        # Additional flags for the command line tools.
        DOCBOOK_XSLTPROCFLAGS=SCons.Util.CLVar(''),
        DOCBOOK_XMLLINTFLAGS=SCons.Util.CLVar(''),
        DOCBOOK_FOPFLAGS=SCons.Util.CLVar(''),
        DOCBOOK_XSLTPROCPARAMS=SCons.Util.CLVar(''),

        # Default command lines for the external tools.
        DOCBOOK_XSLTPROCCOM=xsltproc_com['xsltproc'],
        DOCBOOK_XMLLINTCOM=xmllint_com['xmllint'],
        DOCBOOK_FOPCOM=fop_com['fop'],

        # Screen-output strings (None = echo the full command line).
        DOCBOOK_XSLTPROCCOMSTR=None,
        DOCBOOK_XMLLINTCOMSTR=None,
        DOCBOOK_FOPCOMSTR=None,
    )
    _detect(env)

    env.AddMethod(DocbookEpub, "DocbookEpub")
    env.AddMethod(DocbookHtml, "DocbookHtml")
    env.AddMethod(DocbookHtmlChunked, "DocbookHtmlChunked")
    env.AddMethod(DocbookHtmlhelp, "DocbookHtmlhelp")
    env.AddMethod(DocbookPdf, "DocbookPdf")
    env.AddMethod(DocbookMan, "DocbookMan")
    env.AddMethod(DocbookSlidesPdf, "DocbookSlidesPdf")
    env.AddMethod(DocbookSlidesHtml, "DocbookSlidesHtml")
    env.AddMethod(DocbookXInclude, "DocbookXInclude")
    env.AddMethod(DocbookXslt, "DocbookXslt")
Add Builders and construction variables for docbook to an Environment .
22,114
def save(self):
    """Update the configuration file on disk with the current contents of
    self.contents.  Previous contents are overwritten.

    Raises:
        InternalError: if the file could not be written.
    """
    try:
        with open(self.path, "w") as outfile:
            outfile.writelines(self.contents)
    except IOError as exc:
        raise InternalError("Could not write RCFile contents",
                            name=self.name, path=self.path,
                            error_message=str(exc))
Update the configuration file on disk with the current contents of self . contents . Previous contents are overwritten .
22,115
async def probe_message(self, _message, context):
    """Handle a probe message by triggering a probe for the requesting client."""
    # The client id travels in the connection context, not the message.
    await self.probe(context.user_data)
Handle a probe message .
22,116
async def connect_message(self, message, context):
    """Handle a connect message."""
    client_id = context.user_data
    conn_string = message.get('connection_string')
    await self.connect(client_id, conn_string)
Handle a connect message .
22,117
async def disconnect_message(self, message, context):
    """Handle a disconnect message."""
    client_id = context.user_data
    conn_string = message.get('connection_string')
    await self.disconnect(client_id, conn_string)
Handle a disconnect message .
22,118
async def open_interface_message(self, message, context):
    """Handle an open_interface message."""
    client_id = context.user_data
    await self.open_interface(client_id,
                              message.get('connection_string'),
                              message.get('interface'))
Handle an open_interface message .
22,119
async def close_interface_message(self, message, context):
    """Handle a close_interface message."""
    client_id = context.user_data
    await self.close_interface(client_id,
                               message.get('connection_string'),
                               message.get('interface'))
Handle a close_interface message .
22,120
async def send_rpc_message(self, message, context):
    """Handle a send_rpc message.

    Calls ``self.send_rpc`` and packs either the binary response or the
    raised RPC-level exception into a status/payload dict for the client.
    """
    conn_string = message.get('connection_string')
    rpc_id = message.get('rpc_id')
    address = message.get('address')
    timeout = message.get('timeout')
    payload = message.get('payload')
    client_id = context.user_data

    self._logger.debug("Calling RPC %d:0x%04X with payload %s on %s",
                       address, rpc_id, payload, conn_string)

    response = bytes()
    err = None
    try:
        response = await self.send_rpc(client_id, conn_string, address,
                                       rpc_id, payload, timeout=timeout)
    except VALID_RPC_EXCEPTIONS as internal_err:
        # Expected RPC-level failures are encoded into the response status.
        err = internal_err
    except (DeviceAdapterError, DeviceServerError):
        # Server/adapter errors propagate unchanged to the caller.
        raise
    except Exception as internal_err:
        # Anything else is unexpected; log with traceback and wrap.
        self._logger.warning("Unexpected exception calling RPC %d:0x%04x",
                             address, rpc_id, exc_info=True)
        raise ServerCommandError('send_rpc', str(internal_err)) from internal_err

    status, response = pack_rpc_response(response, err)
    return {
        'status': status,
        'payload': base64.b64encode(response)
    }
Handle a send_rpc message .
22,121
async def send_script_message(self, message, context):
    """Handle a send_script message."""
    client_id = context.user_data
    conn_string = message.get('connection_string')

    # Only whole scripts are accepted; fragmented transfers are unsupported.
    if message.get('fragment_count') != 1:
        raise DeviceServerError(client_id, conn_string, 'send_script',
                                'fragmented scripts are not yet supported')

    await self.send_script(client_id, conn_string, message.get('script'))
Handle a send_script message .
22,122
async def debug_command_message(self, message, context):
    """Handle a debug message, returning the debug command's result."""
    client_id = context.user_data
    return await self.debug(client_id,
                            message.get('connection_string'),
                            message.get('command'),
                            message.get('args'))
Handle a debug message .
22,123
async def client_event_handler(self, client_id, event_tuple, user_data):
    """Forward an event on behalf of a client.

    Translates the internal (conn_string, event_name, event) tuple into the
    corresponding websocket notification and sends it; unknown event names
    are dropped with a debug log.
    """
    conn_string, event_name, event = event_tuple
    if event_name == 'report':
        report = event.serialize()
        # Binary report data must be base64-encoded for the wire format.
        report['encoded_report'] = base64.b64encode(report['encoded_report'])
        msg_payload = dict(connection_string=conn_string, serialized_report=report)
        msg_name = OPERATIONS.NOTIFY_REPORT
    elif event_name == 'trace':
        encoded_payload = base64.b64encode(event)
        msg_payload = dict(connection_string=conn_string, payload=encoded_payload)
        msg_name = OPERATIONS.NOTIFY_TRACE
    elif event_name == 'progress':
        msg_payload = dict(connection_string=conn_string, operation=event.get('operation'),
                           done_count=event.get('finished'), total_count=event.get('total'))
        msg_name = OPERATIONS.NOTIFY_PROGRESS
    elif event_name == 'device_seen':
        msg_payload = event
        msg_name = OPERATIONS.NOTIFY_DEVICE_FOUND
    elif event_name == 'broadcast':
        report = event.serialize()
        report['encoded_report'] = base64.b64encode(report['encoded_report'])
        msg_payload = dict(connection_string=conn_string, serialized_report=report)
        msg_name = OPERATIONS.NOTIFY_BROADCAST
    else:
        self._logger.debug("Not forwarding unknown event over websockets: %s", event_tuple)
        return

    try:
        self._logger.debug("Sending event %s: %s", msg_name, msg_payload)
        await self.server.send_event(user_data, msg_name, msg_payload)
    except websockets.exceptions.ConnectionClosed:
        # The client went away; dropping the notification is fine.
        self._logger.debug("Could not send notification because connection was closed for client %s", client_id)
Forward an event on behalf of a client .
22,124
def generate(env):
    """Add Builders and construction variables for sun f90 compiler to an Environment."""
    add_all_to_env(env)

    # Use the first known compiler found on the PATH, else fall back to 'f90'.
    fcomp = env.Detect(compilers) or 'f90'
    env['FORTRAN'] = fcomp
    env['F90'] = fcomp

    env['SHFORTRAN'] = '$FORTRAN'
    env['SHF90'] = '$F90'
    # -KPIC: position-independent code flag for shared objects.
    env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')
    env['SHF90FLAGS'] = SCons.Util.CLVar('$F90FLAGS -KPIC')
Add Builders and construction variables for sun f90 compiler to an Environment .
22,125
def Builder(**kw):
    """A factory for builder objects.

    Normalizes the 'generator', 'action' and 'emitter' keyword arguments
    into their object forms before constructing a BuilderBase (wrapped in a
    CompositeBuilder when the action was a suffix dictionary).
    """
    composite = None
    if 'generator' in kw:
        if 'action' in kw:
            raise UserError("You must not specify both an action and a generator.")
        kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
        del kw['generator']
    elif 'action' in kw:
        source_ext_match = kw.get('source_ext_match', 1)
        if 'source_ext_match' in kw:
            del kw['source_ext_match']
        if SCons.Util.is_Dict(kw['action']):
            # A dict action maps source suffixes to commands.
            composite = DictCmdGenerator(kw['action'], source_ext_match)
            kw['action'] = SCons.Action.CommandGeneratorAction(composite, {})
            kw['src_suffix'] = composite.src_suffixes()
        else:
            kw['action'] = SCons.Action.Action(kw['action'])

    if 'emitter' in kw:
        emitter = kw['emitter']
        if SCons.Util.is_String(emitter):
            # A string emitter must name an Environment variable ('$VAR').
            var = SCons.Util.get_environment_var(emitter)
            if not var:
                raise UserError("Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter)
            kw['emitter'] = EmitterProxy(var)
        elif SCons.Util.is_Dict(emitter):
            kw['emitter'] = DictEmitter(emitter)
        elif SCons.Util.is_List(emitter):
            kw['emitter'] = ListEmitter(emitter)

    result = BuilderBase(**kw)

    if not composite is None:
        result = CompositeBuilder(result, composite)

    return result
A factory for builder objects .
22,126
def _node_errors(builder, env, tlist, slist):
    """Validate that the lists of target and source nodes are legal for this
    builder and environment.  Raise errors or issue warnings as appropriate.
    """
    for t in tlist:
        if t.side_effect:
            raise UserError("Multiple ways to build the same target were specified for: %s" % t)
        if t.has_explicit_builder():
            # Check whether a different environment was used for this target.
            if (not t.env is None and not t.env is env and
                    not (getattr(t.env, '__subject', 0) is getattr(env, '__subject', 1) and
                         getattr(t.env, 'overrides', 0) == getattr(env, 'overrides', 1) and
                         not builder.multi)):
                action = t.builder.action
                t_contents = t.builder.action.get_contents(tlist, slist, t.env)
                contents = builder.action.get_contents(tlist, slist, env)

                if t_contents == contents:
                    # Same action under both environments: just warn.
                    msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
                    SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
                else:
                    try:
                        msg = "Two environments with different actions were specified for the same target: %s\n(action 1: %s)\n(action 2: %s)" % (t, t_contents.decode('utf-8'), contents.decode('utf-8'))
                    except UnicodeDecodeError as e:
                        # Action contents are not valid UTF-8; omit them.
                        msg = "Two environments with different actions were specified for the same target: %s" % t
                    raise UserError(msg)
            if builder.multi:
                if t.builder != builder:
                    msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t)
                    raise UserError(msg)
                # A target can only appear in one target list.
                if t.get_executor().get_all_targets() != tlist:
                    msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, list(map(str, t.get_executor().get_all_targets())), list(map(str, tlist)))
                    raise UserError(msg)
            elif t.sources != slist:
                msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, list(map(str, t.sources)), list(map(str, slist)))
                raise UserError(msg)

    if builder.single_source:
        if len(slist) > 1:
            raise UserError("More than one source given for single-source builder: targets=%s sources=%s" % (list(map(str, tlist)), list(map(str, slist))))
Validate that the lists of target and source nodes are legal for this builder and environment . Raise errors or issue warnings as appropriate .
22,127
def is_a_Builder(obj):
    """Return True if ``obj`` is one of our Builder classes or any callable."""
    if isinstance(obj, (BuilderBase, CompositeBuilder)):
        return True
    return callable(obj)
Returns True if the specified obj is one of our Builder classes .
22,128
def get_name(self, env):
    """Attempt to get the name of the Builder.

    Looks this builder up in env['BUILDERS'] first; failing that falls
    back to self.name and finally to the class name.
    """
    try:
        builders = env['BUILDERS']
        position = list(builders.values()).index(self)
        return list(builders.keys())[position]
    except (AttributeError, KeyError, TypeError, ValueError):
        try:
            return self.name
        except AttributeError:
            return str(self.__class__)
Attempts to get the name of the Builder .
22,129
def _create_nodes(self, env, target=None, source=None):
    """Create and return lists of target and source nodes."""
    src_suf = self.get_src_suffix(env)

    target_factory = env.get_factory(self.target_factory)
    source_factory = env.get_factory(self.source_factory)

    source = self._adjustixes(source, None, src_suf)
    slist = env.arg2nodes(source, source_factory)

    pre = self.get_prefix(env, slist)
    suf = self.get_suffix(env, slist)

    if target is None:
        # Derive the target name from the first source node.
        try:
            t_from_s = slist[0].target_from_source
        except AttributeError:
            raise UserError("Do not know how to create a target from source `%s'" % slist[0])
        except IndexError:
            tlist = []
        else:
            splitext = lambda S: self.splitext(S, env)
            tlist = [t_from_s(pre, suf, splitext)]
    else:
        target = self._adjustixes(target, pre, suf, self.ensure_suffix)
        tlist = env.arg2nodes(target, target_factory, target=target, source=source)

    if self.emitter:
        # Temporarily mark un-built targets with this builder so the emitter
        # can recognize them; undo the marking afterwards.
        new_targets = []
        for t in tlist:
            if not t.is_derived():
                t.builder_set(self)
                new_targets.append(t)

        orig_tlist = tlist[:]
        orig_slist = slist[:]

        target, source = self.emitter(target=tlist, source=slist, env=env)

        for t in new_targets:
            if t.builder is self:
                t.builder_set(None)

        # Convert whatever the emitter returned back into node lists.
        tlist = env.arg2nodes(target, target_factory, target=orig_tlist, source=orig_slist)
        slist = env.arg2nodes(source, source_factory, target=orig_tlist, source=orig_slist)

    return tlist, slist
Create and return lists of target and source nodes .
22,130
def _get_sdict(self, env):
    """Return a dict mapping every source suffix of every src_builder of
    this Builder to the underlying Builder that should be called first.
    """
    # Later builders overwrite earlier ones for duplicate suffixes,
    # matching the original loop's insertion order.
    return {suf: bld
            for bld in self.get_src_builders(env)
            for suf in bld.src_suffixes(env)}
Returns a dictionary mapping all of the source suffixes of all src_builders of this Builder to the underlying Builder that should be called first .
22,131
def get_src_builders(self, env):
    """Return the list of source Builders for this Builder.

    Results are memoized per construction environment, keyed by id(env).
    """
    memo_key = id(env)
    try:
        memo_dict = self._memo['get_src_builders']
    except KeyError:
        memo_dict = {}
        self._memo['get_src_builders'] = memo_dict
    else:
        try:
            return memo_dict[memo_key]
        except KeyError:
            pass

    builders = []
    for bld in self.src_builder:
        if SCons.Util.is_String(bld):
            # String entries name a builder in env['BUILDERS']; unknown
            # names are silently skipped.
            try:
                bld = env['BUILDERS'][bld]
            except KeyError:
                continue
        builders.append(bld)

    memo_dict[memo_key] = builders
    return builders
Returns the list of source Builders for this Builder .
22,132
def subst_src_suffixes(self, env):
    """Expand construction variables in the source-suffix list.

    The suffix list may contain construction variable expansions, so each
    entry is run through env.subst(); results are memoized per env.
    """
    memo_key = id(env)
    memo_dict = self._memo.setdefault('subst_src_suffixes', {})
    if memo_key in memo_dict:
        return memo_dict[memo_key]

    suffixes = [env.subst(x) for x in self.src_suffix]
    memo_dict[memo_key] = suffixes
    return suffixes
The suffix list may contain construction variable expansions so we have to evaluate the individual strings . To avoid doing this over and over we memoize the results for each construction environment .
22,133
def src_suffixes(self, env):
    """Return the source suffixes for this Builder plus all of its
    src_builders, de-duplicated while preserving order.
    """
    suffixes = self.subst_src_suffixes(env)
    seen = dict.fromkeys(suffixes, 1)
    for bld in self.get_src_builders(env):
        for suf in bld.src_suffixes(env):
            if suf not in seen:
                seen[suf] = 1
                # Note: extends the list returned by subst_src_suffixes,
                # matching the original behavior exactly.
                suffixes.append(suf)
    return suffixes
Returns the list of source suffixes for all src_builders of this Builder .
22,134
def generate(env):
    """Add Builders and construction variables for Visual Age linker to an Environment."""
    link.generate(env)

    env['SMARTLINKFLAGS'] = smart_linkflags
    env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS')
    # -qmkshrobj: build a shared object; -qsuppress: silence message 1501-218.
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218')
    env['SHLIBSUFFIX'] = '.a'
Add Builders and construction variables for Visual Age linker to an Environment .
22,135
def _parse_target(target):
    """Parse an 8-byte binary targeting information structure.

    Returns a dict with 'controller' and 'slot' keys, or raises
    ArgumentError for malformed or unsupported targeting data.
    """
    if len(target) != 8:
        raise ArgumentError("Invalid targeting data length",
                            expected=8, length=len(target))

    # Byte 0 is the slot, byte 7 the match operation; bytes 1-6 are padding.
    slot, match_op = struct.unpack("<B6xB", target)

    if match_op == _MATCH_CONTROLLER:
        return dict(controller=True, slot=0)
    if match_op == _MATCH_SLOT:
        return dict(controller=False, slot=slot)

    raise ArgumentError("Unsupported complex targeting specified", match_op=match_op)
Parse a binary targeting information structure .
22,136
def put_task(self, func, args, response):
    """Place a task onto the RPC queue as a (func, args, response) tuple."""
    item = (func, args, response)
    self._rpc_queue.put_nowait(item)
Place a task onto the RPC queue .
22,137
def put_rpc(self, address, rpc_id, arg_payload, response):
    """Place an RPC onto the RPC queue as an (address, rpc_id, payload, response) tuple."""
    item = (address, rpc_id, arg_payload, response)
    self._rpc_queue.put_nowait(item)
Place an RPC onto the RPC queue .
22,138
async def stop(self):
    """Stop the rpc queue from inside the event loop.

    Cancels the background task (if any) and waits for it to finish.
    """
    if self._rpc_task is None:
        return

    self._rpc_task.cancel()
    try:
        await self._rpc_task
    except asyncio.CancelledError:
        # Cancellation is the expected outcome here.
        pass

    self._rpc_task = None
Stop the rpc queue from inside the event loop .
22,139
def add_segment(self, address, data, overwrite=False):
    """Add a contiguous segment of data to this memory map.

    NOTE(review): the ``overwrite`` parameter is accepted but never used in
    this body — presumably reserved for future overlap support; confirm.
    """
    seg_type = self._classify_segment(address, len(data))
    # Only segments disjoint from all existing ones are supported.
    if not isinstance(seg_type, DisjointSegment):
        raise ArgumentError("Unsupported segment type")

    # Segment spans the inclusive range [address, address + len(data) - 1].
    segment = MemorySegment(address, address + len(data) - 1, len(data), bytearray(data))
    self._segments.append(segment)
Add a contiguous segment of data to this memory map
22,140
def _create_slice(self, key):
    """Create a slice in a memory segment corresponding to a key.

    ``key`` may be a slice (step must be 1) or an int address.  Returns a
    (segment, start_offset, end_offset) tuple; end_offset is None for a
    single-address key.
    """
    if isinstance(key, slice):
        step = key.step
        if step is None:
            step = 1
        if step != 1:
            raise ArgumentError("You cannot slice with a step that is not equal to 1", step=key.step)

        start_address = key.start
        # key.stop is exclusive; work with the inclusive last address.
        end_address = key.stop - 1

        start_i, start_seg = self._find_address(start_address)
        end_i, _end_seg = self._find_address(end_address)

        # Both endpoints must fall inside the same known segment.
        if start_seg is None or start_i != end_i:
            raise ArgumentError("Slice would span invalid data in memory",
                                start_address=start_address, end_address=end_address)

        block_offset = start_address - start_seg.start_address
        block_length = end_address - start_address + 1
        return start_seg, block_offset, block_offset + block_length
    elif isinstance(key, int):
        start_i, start_seg = self._find_address(key)
        if start_seg is None:
            raise ArgumentError("Requested invalid address", address=key)

        return start_seg, key - start_seg.start_address, None
    else:
        raise ArgumentError("Unknown type of address key", address=key)
Create a slice in a memory segment corresponding to a key .
22,141
def _classify_segment(self, address, length):
    """Determine how a new data segment fits into our existing world.

    Currently only segments that do not touch any existing segment are
    supported; anything overlapping raises ArgumentError.
    """
    last_address = address + length - 1
    _, seg_at_start = self._find_address(address)
    _, seg_at_end = self._find_address(last_address)

    if seg_at_start is not None or seg_at_end is not None:
        raise ArgumentError("Overlapping segments are not yet supported",
                            address=address, length=length)

    return DisjointSegment()
Determine how a new data segment fits into our existing world
22,142
def generate(env):
    """Add Builders and construction variables for ifort to an Environment."""
    # Register the Fortran dependency scanner for preprocessed suffixes.
    fscan = FortranScan("FORTRANPATH")
    SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
    SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)

    if 'FORTRANFILESUFFIXES' not in env:
        env['FORTRANFILESUFFIXES'] = ['.i']
    else:
        env['FORTRANFILESUFFIXES'].append('.i')

    if 'F90FILESUFFIXES' not in env:
        env['F90FILESUFFIXES'] = ['.i90']
    else:
        env['F90FILESUFFIXES'].append('.i90')

    add_all_to_env(env)

    fc = 'ifort'
    for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
        env['%s' % dialect] = fc
        env['SH%s' % dialect] = '$%s' % dialect
        if env['PLATFORM'] == 'posix':
            # -fPIC: position-independent code for shared libraries.
            env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)

    if env['PLATFORM'] == 'win32':
        # On Windows ifort names its output with -object:, not -o.
        for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
            for var in ['%sCOM' % dialect, '%sPPCOM' % dialect,
                        'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]:
                env[var] = env[var].replace('-o $TARGET', '-object:$TARGET')
        env['FORTRANMODDIRPREFIX'] = "/module:"
    else:
        env['FORTRANMODDIRPREFIX'] = "-module "
Add Builders and construction variables for ifort to an Environment .
22,143
def run(self, postfunc=lambda: None):
    """Run the jobs.

    ``postfunc`` is called after the jobs finish (or fail), before the
    signal handlers are restored.
    """
    self._setup_sig_handler()
    try:
        self.job.start()
    finally:
        # Always run the post hook and restore handlers, even on error.
        postfunc()
        self._reset_sig_handler()
Run the jobs .
22,144
def expired(self):
    """Boolean property: True if this action's timeout has elapsed.

    Actions with no timeout never expire.
    """
    if self.timeout is None:
        return False
    elapsed = monotonic() - self.start_time
    return elapsed > self.timeout
Boolean property if this action has expired
22,145
def begin_connection(self, connection_id, internal_id, callback, context, timeout):
    """Asynchronously begin a connection attempt by queuing a
    'begin_connection' action.
    """
    details = dict(callback=callback,
                   connection_id=connection_id,
                   internal_id=internal_id,
                   context=context)
    self._actions.put(ConnectionAction('begin_connection', details,
                                       timeout=timeout, sync=False))
Asynchronously begin a connection attempt
22,146
def begin_operation(self, conn_or_internal_id, op_name, callback, timeout):
    """Begin an operation on a connection by queuing a 'begin_operation' action."""
    details = dict(id=conn_or_internal_id,
                   callback=callback,
                   operation_name=op_name)
    self._actions.put(ConnectionAction('begin_operation', details,
                                       timeout=timeout, sync=False))
Begin an operation on a connection
22,147
def _begin_operation_action(self, action):
    """Begin an attempted operation.

    Rejects the operation via the callback when the connection is not
    idle; otherwise marks it in-progress with the operation name.
    """
    conn_key = action.data['id']
    notify = action.data['callback']

    if self._get_connection_state(conn_key) != self.Idle:
        notify(conn_key, self.id, False, 'Cannot start operation, connection is not idle')
        return

    conn_data = self._get_connection(conn_key)
    conn_data['state'] = self.InProgress
    conn_data['microstate'] = action.data['operation_name']
    conn_data['action'] = action
Begin an attempted operation .
22,148
def allow_exception(self, exc_class):
    """Allow raising this class of exceptions from commands, keyed by class name."""
    self._allowed_exceptions[exc_class.__name__] = exc_class
Allow raising this class of exceptions from commands .
22,149
async def start(self, name="websocket_client"):
    """Connect to the websocket server and start the connection manager task."""
    connection = await websockets.connect(self.url)
    self._con = connection
    self._connection_task = self._loop.add_task(self._manage_connection(), name=name)
Connect to the websocket server .
22,150
async def stop(self):
    """Stop this websocket client and disconnect from the server.

    Safe to call when not started; cleanup happens even if stopping the
    connection task raises.
    """
    task = self._connection_task
    if task is None:
        return

    try:
        await task.stop()
    finally:
        self._con = None
        self._connection_task = None
        self._manager.clear()
Stop this websocket client and disconnect from the server .
22,151
async def send_command(self, command, args, validator, timeout=10.0):
    """Send a command and synchronously wait for a single response.

    The payload is verified by ``validator`` unless validator is None.
    """
    if self._con is None:
        raise ExternalError("No websock connection established")

    cmd_uuid = str(uuid.uuid4())
    msg = dict(type='command', operation=command, uuid=cmd_uuid, payload=args)
    packed = pack(msg)

    # Register the response waiter before sending, so a fast reply
    # cannot arrive before we are listening for it.
    response_future = self._manager.wait_for(type="response", uuid=cmd_uuid, timeout=timeout)

    await self._con.send(packed)

    response = await response_future

    if response.get('success') is False:
        self._raise_error(command, response)

    if validator is None:
        return response.get('payload')

    return validator.verify(response.get('payload'))
Send a command and synchronously wait for a single response .
22,152
# Receive loop for the websocket client.  Each raw message is unpacked,
# validated against VALID_SERVER_MESSAGE and dispatched through the
# message manager; malformed or unhandled messages are logged and dropped.
# On cancellation (stop()) — and on any exit — a synthetic DISCONNECT_EVENT
# is injected so listeners learn the connection is gone, then the socket
# is closed.
async def _manage_connection ( self ) : try : while True : message = await self . _con . recv ( ) try : unpacked = unpack ( message ) except Exception : self . _logger . exception ( "Corrupt message received" ) continue if not VALID_SERVER_MESSAGE . matches ( unpacked ) : self . _logger . warning ( "Dropping invalid message from server: %s" , unpacked ) continue if not await self . _manager . process_message ( unpacked , wait = False ) : self . _logger . warning ( "No handler found for received message, message=%s" , unpacked ) except asyncio . CancelledError : self . _logger . info ( "Closing connection to server due to stop()" ) finally : await self . _manager . process_message ( dict ( type = 'event' , name = self . DISCONNECT_EVENT , payload = None ) ) await self . _con . close ( )
Internal coroutine for managing the client connection .
22,153
def register_event(self, name, callback, validator):
    """Invoke *callback* with a validated payload for every *name* event.

    Payloads failing validation are logged and dropped; callback errors
    are logged but never propagate.  The callback may be a coroutine.
    """

    async def _dispatch(message):
        payload = message.get('payload')

        try:
            payload = validator.verify(payload)
        except ValidationError:
            self._logger.warning("Dropping invalid payload for event %s, payload=%s",
                                 name, payload)
            return

        try:
            result = callback(payload)
            if inspect.isawaitable(result):
                await result
        except:  # noqa: E722 -- deliberately swallow everything from user callbacks
            self._logger.error("Error calling callback for event %s, payload=%s",
                               name, payload, exc_info=True)

    self._manager.every_match(_dispatch, type="event", name=name)
Register a callback to receive events .
22,154
def post_command(self, command, args):
    """Fire-and-forget a command; the response is logged, not returned."""
    coroutine = self.send_command(command, args, Verifier())
    self._loop.log_coroutine(coroutine)
Post a command asynchronously and don't wait for a response .
22,155
def copy_all_a(input_a, *other_inputs, **kwargs):
    """Drain every reading from input_a into the output list.

    All other inputs are consumed and discarded.
    """
    readings = []
    while input_a.count() > 0:
        readings.append(input_a.pop())

    for walker in other_inputs:
        walker.skip_all()

    return readings
Copy all readings in input a into the output .
22,156
def copy_count_a(input_a, *other_inputs, **kwargs):
    """Emit one reading whose value is the number of readings in input_a.

    Every input (including input_a) is consumed and discarded.
    """
    reading_count = input_a.count()

    input_a.skip_all()
    for walker in other_inputs:
        walker.skip_all()

    return [IOTileReading(0, 0, reading_count)]
Copy a count of the readings in input a into the output .
22,157
def call_rpc(*inputs, **kwargs):
    """Pop an encoded value from input 1 and execute the RPC it describes.

    The popped value packs the tile address in its high 16 bits and the
    rpc id in its low 16 bits.  The RPC result becomes the output reading;
    a failed RPC or an empty stream produces no output (best effort).
    All inputs are consumed afterwards.
    """
    rpc_executor = kwargs['rpc_executor']

    output = []
    try:
        encoded = inputs[1].pop()
        addr = encoded.value >> 16
        rpc_id = encoded.value & 0xFFFF

        output.append(IOTileReading(0, 0, rpc_executor.rpc(addr, rpc_id)))
    except (HardwareError, StreamEmptyError):
        pass

    for walker in inputs:
        walker.skip_all()

    return output
Call an RPC based on the encoded value read from input b .
22,158
def trigger_streamer(*inputs, **kwargs):
    """Mark the streamer whose index is read from input 1.

    Returns a single zero reading on success, or an empty list when input 1
    is empty or the index is rejected.  All inputs are always consumed.
    """
    mark_streamer = kwargs['mark_streamer']

    try:
        reading = inputs[1].pop()
    except StreamEmptyError:
        return []
    finally:
        # Consume every input even when input 1 was empty.
        for walker in inputs:
            walker.skip_all()

    try:
        mark_streamer(reading.value)
    except ArgumentError:
        return []

    return [IOTileReading(0, 0, 0)]
Trigger a streamer based on the index read from input b .
22,159
def subtract_afromb(*inputs, **kwargs):
    """Emit (b - a) using the next reading from each of the first two inputs.

    Returns an empty list when either stream is empty.
    NOTE(review): input 0 is popped first, so if only input 1 is empty the
    popped a-reading is still consumed — confirm this loss is intentional.
    """
    try:
        reading_a = inputs[0].pop()
        reading_b = inputs[1].pop()
    except StreamEmptyError:
        return []

    return [IOTileReading(0, 0, reading_b.value - reading_a.value)]
Subtract stream a from stream b .
22,160
def _clean_intenum ( obj ) : if isinstance ( obj , dict ) : for key , value in obj . items ( ) : if isinstance ( value , IntEnum ) : obj [ key ] = value . value elif isinstance ( value , ( dict , list ) ) : obj [ key ] = _clean_intenum ( value ) elif isinstance ( obj , list ) : for i , value in enumerate ( obj ) : if isinstance ( value , IntEnum ) : obj [ i ] = value . value elif isinstance ( value , ( dict , list ) ) : obj [ i ] = _clean_intenum ( value ) return obj
Remove all IntEnum classes from a map .
22,161
def _track_change ( self , name , value , formatter = None ) : self . _emulation_log . track_change ( self . _emulation_address , name , value , formatter )
Track that a change happened .
22,162
def save_state(self, out_path):
    """Serialize this object's state to *out_path* as indented JSON.

    IntEnum values are converted to plain ints first so the state is
    JSON-serializable.
    """
    state = _clean_intenum(self.dump_state())

    with open(out_path, "w") as outfile:
        json.dump(state, outfile, indent=4)
Save the current state of this emulated object to a file .
22,163
def load_state(self, in_path):
    """Restore this object's state from the JSON file at *in_path*."""
    with open(in_path, "r") as infile:
        self.restore_state(json.load(infile))
Load the current state of this emulated object from a file .
22,164
def load_scenario(self, scenario_name, **kwargs):
    """Run the named scenario handler, forwarding all keyword arguments.

    Raises:
        ArgumentError: If *scenario_name* was never registered.
    """
    handler = self._known_scenarios.get(scenario_name)
    if handler is None:
        raise ArgumentError("Unknown scenario %s" % scenario_name,
                            known_scenarios=list(self._known_scenarios))

    handler(**kwargs)
Load a scenario into the emulated object .
22,165
def register_scenario(self, scenario_name, handler):
    """Associate *handler* with *scenario_name*, refusing duplicate names.

    Raises:
        ArgumentError: If *scenario_name* is already registered.
    """
    if scenario_name in self._known_scenarios:
        raise ArgumentError("Attempted to add the same scenario name twice",
                            scenario_name=scenario_name,
                            previous_handler=self._known_scenarios[scenario_name])

    self._known_scenarios[scenario_name] = handler
Register a scenario handler for this object .
22,166
def generate(env):
    """Add construction variables for the HP aCC C++ compiler to *env*.

    Builds on the generic cc tool setup; +Z enables position-independent
    code for shared objects.
    """
    cc.generate(env)

    env['CXX'] = 'aCC'
    env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z')
Add Builders and construction variables for aCC & cc to an Environment .
22,167
def add_pass(self, name, opt_pass, before=None, after=None):
    """Register *opt_pass* under *name* with optional ordering constraints.

    *before* and *after* are lists of pass names this pass must run
    before/after; None means no constraint.
    """
    self._known_passes[name] = (opt_pass,
                                [] if before is None else before,
                                [] if after is None else after)
Add an optimization pass to the optimizer .
22,168
def _order_pases(self, passes):
    """Return the pass names in *passes* topologically sorted by constraints.

    (Name kept as-is — historical typo — since callers reference it.)
    Each pass's "after" names become its dependencies; each "before" name
    that is itself selected gains this pass as a dependency.
    """
    selected = set(passes)
    dependencies = {}

    for name in selected:
        _, before, after = self._known_passes[name]

        deps = dependencies.setdefault(name, set())
        deps.update(after)

        # "run before X" means X depends on this pass.
        for other in before:
            if other in selected:
                dependencies.setdefault(other, set()).add(name)

    return toposort_flatten(dependencies)
Topologically sort optimization passes .
22,169
def optimize(self, sensor_graph, model):
    """Run every registered optimization pass over *sensor_graph*.

    Each pass is re-run until it reports no further changes before moving
    on to the next one.
    """
    for opt_name in self._order_pases(self._known_passes.keys()):
        pass_instance = self._known_passes[opt_name][0]()

        # Iterate to a fixed point: run() returns truthy while it changes things.
        while pass_instance.run(sensor_graph, model=model):
            pass
Optimize a sensor graph by running optimization passes .
22,170
def get_calling_namespaces():
    """Return (locals, globals) of the nearest caller outside this module."""
    # Raise and catch a throwaway exception to get a handle on the current
    # stack frame without any imports beyond sys.
    try:
        1 // 0
    except ZeroDivisionError:
        frame = sys.exc_info()[2].tb_frame.f_back

    # Walk outward past any frames that belong to this module itself.
    while frame.f_globals.get("__name__") == __name__:
        frame = frame.f_back

    return frame.f_locals, frame.f_globals
Return the locals and globals for the function that called into this module in the current call stack .
22,171
def annotate(node):
    """Record on *node* the SConscript frame that created it.

    Raises:
        SCons.Errors.InternalError: If no SConscript frame can be found.
    """
    tb = sys.exc_info()[2]

    # Walk down the traceback until we reach the frame that carries the
    # sentinel marking the bottom of the SConscript call stack.
    while tb and stack_bottom not in tb.tb_frame.f_locals:
        tb = tb.tb_next
    if not tb:
        raise SCons.Errors.InternalError("could not find SConscript stack frame")

    node.creator = traceback.extract_stack(tb)[0]
Annotate a node with the stack frame describing the SConscript file and line number that created it .
22,172
def BuildDefaultGlobals():
    """Return a copy of the default global namespace for SConscript files.

    The namespace is built lazily once from SCons.Script's public names,
    excluding nested modules, and cached in the module-level GlobalDict.
    """
    global GlobalDict

    if GlobalDict is None:
        GlobalDict = {}

        import SCons.Script
        script_dict = SCons.Script.__dict__
        module_type = type(SCons.Script)

        for name in dir(SCons.Script):
            if not isinstance(script_dict[name], module_type):
                GlobalDict[name] = script_dict[name]

    return GlobalDict.copy()
Create a dictionary containing all the default globals for SConstruct and SConscript files .
22,173
def _exceeds_version ( self , major , minor , v_major , v_minor ) : return ( major > v_major or ( major == v_major and minor > v_minor ) )
Return True if ( major , minor ) is greater than ( v_major , v_minor ) and False otherwise .
22,174
def EnsureSConsVersion(self, major, minor, revision=0):
    """Exit with status 2 if the running SCons is older than the given version.

    Development checkouts (unsubstituted version string) only get a warning.
    """
    # '__' + 'VERSION__' keeps the sentinel from being substituted in-place.
    if SCons.__version__ == '__' + 'VERSION__':
        SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
                            "EnsureSConsVersion is ignored for development version")
        return

    scons_ver = self._get_major_minor_revision(SCons.__version__)
    if scons_ver >= (major, minor, revision):
        return

    if revision:
        wanted = '%d.%d.%d' % (major, minor, revision)
    else:
        wanted = '%d.%d' % (major, minor)
    print("SCons %s or greater required, but you have SCons %s"
          % (wanted, SCons.__version__))
    sys.exit(2)
Exit abnormally if the SCons version is not late enough .
22,175
def EnsurePythonVersion(self, major, minor):
    """Exit with status 2 if the running Python is older than (major, minor)."""
    if sys.version_info >= (major, minor):
        return

    running = sys.version.split()[0]
    print("Python %d.%d or greater required, but you have Python %s"
          % (major, minor, running))
    sys.exit(2)
Exit abnormally if the Python version is not late enough .
22,176
def validate_vars(env):
    """Validate the PCH-related construction variables in *env*.

    Raises:
        SCons.Errors.UserError: If PCH is set but PCHSTOP is missing or
            is not a string.
    """
    if 'PCH' not in env or not env['PCH']:
        return

    if 'PCHSTOP' not in env:
        raise SCons.Errors.UserError(
            "The PCHSTOP construction must be defined if PCH is defined.")
    if not SCons.Util.is_String(env['PCHSTOP']):
        raise SCons.Errors.UserError(
            "The PCHSTOP construction variable must be a string: %r"
            % env['PCHSTOP'])
Validate the PCH and PCHSTOP construction variables .
22,177
def msvc_set_PCHPDBFLAGS(env):
    """Set PCHPDBFLAGS for the MSVC version recorded in *env*.

    MSVC before version 8 (and environments with no recorded version) get
    the conditional /Yd flag; version 8 and later get an empty value.
    """
    yd_flag = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])

    version = env.get('MSVC_VERSION', False)
    if not version:
        env['PCHPDBFLAGS'] = yd_flag
        return

    maj, min = msvc_version_to_maj_min(version)
    env['PCHPDBFLAGS'] = yd_flag if maj < 8 else ''
Set appropriate PCHPDBFLAGS for the MSVC version being used .
22,178
def pch_emitter(target, source, env):
    """Ensure the PCH builder also emits the companion .obj file.

    Returns the (target, source) pair with targets normalized to
    [pch, obj], synthesizing the obj path from the pch path if needed.
    """
    validate_vars(env)

    pch = None
    obj = None
    for node in target:
        extension = SCons.Util.splitext(str(node))[1]
        if extension == '.pch':
            pch = node
        if extension == '.obj':
            obj = node

    if not obj:
        obj = SCons.Util.splitext(str(pch))[0] + '.obj'

    return ([pch, obj], source)
Adds the object file target .
22,179
def object_emitter(target, source, env, parent_emitter):
    """Add a dependency from an object file to the PCH it is built against."""
    validate_vars(env)
    parent_emitter(target, source, env)

    if 'PCH' in env:
        pch = env['PCH']
        # The PCH's own object file must not depend on the PCH itself.
        if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj':
            env.Depends(target, pch)

    return (target, source)
Sets up the PCH dependencies for an object file .
22,180
def msvc_batch_key(action, env, target, source):
    """Return a key identifying a unique batch of sources for compilation.

    Returns None when MSVC_BATCH is unset/disabled, or when the target's
    base name differs from the source's (i.e. the object was explicitly
    renamed and cannot be batched).
    """
    if 'MSVC_BATCH' not in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
        return None

    target_node = target[0]
    source_node = source[0]
    if os.path.splitext(target_node.name)[0] != os.path.splitext(source_node.name)[0]:
        return None

    # Batch by action, environment and directory pair.
    return (id(action), id(env), target_node.dir, source_node.dir)
Returns a key to identify unique batches of sources for compilation .
22,181
# Configure an SCons construction Environment for Microsoft Visual C++:
# registers C/C++ actions and emitters on the static/shared object builders
# (with source_ext_match disabled so mixed suffixes batch correctly), then
# sets the CC/CXX/SHCC/SHCXX command lines, PDB/PCH flag expansions,
# preprocessor prefixes, the resource compiler (rc) builder, object
# suffixes, the PCH builder, and finally seeds ENV['SystemRoot'] so spawned
# tools can find system DLLs.  NOTE: this text is extraction-collapsed; the
# statement is split mid-expression across the two lines below.
def generate ( env ) : static_obj , shared_obj = SCons . Tool . createObjBuilders ( env ) static_obj . cmdgen . source_ext_match = False shared_obj . cmdgen . source_ext_match = False for suffix in CSuffixes : static_obj . add_action ( suffix , CAction ) shared_obj . add_action ( suffix , ShCAction ) static_obj . add_emitter ( suffix , static_object_emitter ) shared_obj . add_emitter ( suffix , shared_object_emitter ) for suffix in CXXSuffixes : static_obj . add_action ( suffix , CXXAction ) shared_obj . add_action ( suffix , ShCXXAction ) static_obj . add_emitter ( suffix , static_object_emitter ) shared_obj . add_emitter ( suffix , shared_object_emitter ) env [ 'CCPDBFLAGS' ] = SCons . Util . CLVar ( [ '${(PDB and "/Z7") or ""}' ] ) env [ 'CCPCHFLAGS' ] = SCons . Util . CLVar ( [ '${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}' ] ) env [ '_MSVC_OUTPUT_FLAG' ] = msvc_output_flag env [ '_CCCOMCOM' ] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS' env [ 'CC' ] = 'cl' env [ 'CCFLAGS' ] = SCons . Util . CLVar ( '/nologo' ) env [ 'CFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'CCCOM' ] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}' env [ 'SHCC' ] = '$CC' env [ 'SHCCFLAGS' ] = SCons . Util . CLVar ( '$CCFLAGS' ) env [ 'SHCFLAGS' ] = SCons . Util . CLVar ( '$CFLAGS' ) env [ 'SHCCCOM' ] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}' env [ 'CXX' ] = '$CC' env [ 'CXXFLAGS' ] = SCons . Util . CLVar ( '$( /TP $)' ) env [ 'CXXCOM' ] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}' env [ 'SHCXX' ] = '$CXX' env [ 'SHCXXFLAGS' ] = SCons . Util . 
CLVar ( '$CXXFLAGS' ) env [ 'SHCXXCOM' ] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}' env [ 'CPPDEFPREFIX' ] = '/D' env [ 'CPPDEFSUFFIX' ] = '' env [ 'INCPREFIX' ] = '/I' env [ 'INCSUFFIX' ] = '' env [ 'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME' ] = 1 env [ 'RC' ] = 'rc' env [ 'RCFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'RCSUFFIXES' ] = [ '.rc' , '.rc2' ] env [ 'RCCOM' ] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES' env [ 'BUILDERS' ] [ 'RES' ] = res_builder env [ 'OBJPREFIX' ] = '' env [ 'OBJSUFFIX' ] = '.obj' env [ 'SHOBJPREFIX' ] = '$OBJPREFIX' env [ 'SHOBJSUFFIX' ] = '$OBJSUFFIX' msvc_setup_env_once ( env ) env [ 'CFILESUFFIX' ] = '.c' env [ 'CXXFILESUFFIX' ] = '.cc' msvc_set_PCHPDBFLAGS ( env ) env [ 'PCHCOM' ] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS' env [ 'BUILDERS' ] [ 'PCH' ] = pch_builder if 'ENV' not in env : env [ 'ENV' ] = { } if 'SystemRoot' not in env [ 'ENV' ] : env [ 'ENV' ] [ 'SystemRoot' ] = SCons . Platform . win32 . get_system_root ( )
Add Builders and construction variables for MSVC ++ to an Environment .
22,182
def open(self):
    """Create a HardwareManager and optionally connect to a device.

    If a connection attempt fails, the manager is closed before the
    HardwareError is re-raised so no resources leak.
    """
    self.hwman = HardwareManager(port=self._port)
    self.opened = True

    if self._connection_string is not None:
        connect, argument = self.hwman.connect_direct, self._connection_string
    elif self._connect_id is not None:
        connect, argument = self.hwman.connect, self._connect_id
    else:
        return

    try:
        connect(argument)
    except HardwareError:
        self.hwman.close()
        raise
Open and potentially connect to a device .
22,183
def close(self):
    """Disconnect from the device (if connected) and release the manager."""
    if self.hwman.stream.connected:
        self.hwman.disconnect()

    self.hwman.close()
    self.opened = False
Close and potentially disconnect from a device .
22,184
def get_support_package(tile):
    """Return the tile's single support_package product, or None if absent.

    Raises:
        BuildError: If the tile declares more than one support package.
    """
    packages = tile.find_products('support_package')

    if not packages:
        return None
    if len(packages) > 1:
        raise BuildError("Tile declared multiple support packages, only one is supported",
                         packages=packages)

    return packages[0]
Returns the support_package product .
22,185
def iter_support_files(tile):
    """Yield (output_path, input_path) pairs for files in the support wheel."""
    support_package = get_support_package(tile)

    if support_package is None:
        # No package directory: ship each python module as a top-level file.
        for module, _, _ in iter_python_modules(tile):
            yield os.path.basename(module), module
        return

    # Mirror the package tree, keeping only .py files and dropping the
    # package's own __init__.py.
    for dirpath, _dirnames, filenames in os.walk(support_package):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue

            input_path = os.path.join(dirpath, filename)
            output_path = os.path.relpath(input_path, start=support_package)
            if output_path == "__init__.py":
                continue

            yield output_path, input_path
Iterate over all files that go in the support wheel .
22,186
def iter_python_modules(tile):
    """Yield (module_path, entry_point_string, entry_point_group) triples.

    Raises:
        BuildError: For product types with no known entry point, or module
            paths that do not exist on disk.
    """
    for product_type in tile.PYTHON_PRODUCTS:
        for product in tile.find_products(product_type):
            entry_point = ENTRY_POINT_MAP.get(product_type)
            if entry_point is None:
                raise BuildError("Found an unknown python product (%s) whose entrypoint could not be determined (%s)"
                                 % (product_type, product))

            # A product is "path/to/module:object" or just a module path.
            if ':' in product:
                module, _, obj_name = product.rpartition(':')
            else:
                module, obj_name = product, None

            if not os.path.exists(module):
                raise BuildError("Found a python product whose path did not exist: %s" % module)

            product_name = os.path.basename(module)
            if product_name.endswith(".py"):
                product_name = product_name[:-3]

            import_string = "{} = {}.{}".format(product_name, tile.support_distribution, product_name)
            if obj_name is not None:
                import_string += ":{}".format(obj_name)

            yield (module, import_string, entry_point)
Iterate over all python products in the given tile .
22,187
# Build the setup.py for a tile's support distribution and run it.
# Collects entry points from the tile's python products, fills in name,
# version and dependency metadata, renders setup.py from a template next to
# the target, then chdirs into the output directory to run setuptools'
# sandboxed sdist and bdist_wheel builds (universal when the tile's
# settings ask for it).  The original working directory is always restored.
def generate_setup_py ( target , source , env ) : tile = env [ 'TILE' ] data = { } entry_points = { } for _mod , import_string , entry_point in iter_python_modules ( tile ) : if entry_point not in entry_points : entry_points [ entry_point ] = [ ] entry_points [ entry_point ] . append ( import_string ) data [ 'name' ] = tile . support_distribution data [ 'package' ] = tile . support_distribution data [ 'version' ] = tile . parsed_version . pep440_string ( ) data [ 'deps' ] = [ "{0} {1}" . format ( x . support_distribution , x . parsed_version . pep440_compatibility_specifier ( ) ) for x in _iter_dependencies ( tile ) if x . has_wheel ] if tile . support_wheel_depends : data [ 'deps' ] += tile . support_wheel_depends data [ 'entry_points' ] = entry_points outdir = os . path . dirname ( str ( target [ 0 ] ) ) render_template ( 'setup.py.tpl' , data , out_path = str ( target [ 0 ] ) ) curr = os . getcwd ( ) os . chdir ( outdir ) try : setuptools . sandbox . run_setup ( 'setup.py' , [ '-q' , 'clean' , 'sdist' ] ) if "python_universal" in tile . settings : setuptools . sandbox . run_setup ( 'setup.py' , [ '-q' , 'clean' , 'bdist_wheel' , '--universal' ] ) else : setuptools . sandbox . run_setup ( 'setup.py' , [ '-q' , 'clean' , 'bdist_wheel' ] ) finally : os . chdir ( curr )
Generate the setup . py file for this distribution .
22,188
def defaultMachine(use_rpm_default=True):
    """Return the canonicalized machine/architecture name.

    Prefers rpm's notion of the target cpu; if rpm is unavailable or
    fails, falls back to platform.machine().
    """
    if use_rpm_default:
        try:
            raw = subprocess.check_output(['rpm', '--eval=%_target_cpu'],
                                          shell=False).rstrip()
            rmachine = SCons.Util.to_str(raw)
        except Exception:
            # rpm missing or errored — retry via the platform module.
            return defaultMachine(False)
    else:
        rmachine = platform.machine()

    # Canonicalize names we know about; pass unknown names through.
    if rmachine in arch_canon:
        rmachine = arch_canon[rmachine][0]

    return rmachine
Return the canonicalized machine name .
22,189
def defaultSystem():
    """Return the canonicalized operating system name."""
    rsystem = platform.system()
    canon = os_canon.get(rsystem)
    return canon[0] if canon is not None else rsystem
Return the canonicalized system name .
22,190
# Pre-execution hook for a Taskmaster Task.  Re-raises any pending
# exception recorded on the task, displays a queued taskmaster message,
# then prepares the first target's executor, every action target, and each
# target's side effects.  The module-level print_prepare flag controls
# diagnostic output.
def prepare ( self ) : global print_prepare T = self . tm . trace if T : T . write ( self . trace_message ( u'Task.prepare()' , self . node ) ) self . exception_raise ( ) if self . tm . message : self . display ( self . tm . message ) self . tm . message = None executor = self . targets [ 0 ] . get_executor ( ) if executor is None : return executor . prepare ( ) for t in executor . get_action_targets ( ) : if print_prepare : print ( "Preparing target %s..." % t ) for s in t . side_effects : print ( "...with side-effect %s..." % s ) t . prepare ( ) for s in t . side_effects : if print_prepare : print ( "...Preparing side-effect %s..." % s ) s . prepare ( )
Called just before the task is executed .
22,191
# Execute a Taskmaster Task.  First tries to satisfy every target from the
# build cache; on a partial hit the retrieved files are unlinked again
# (ignoring I/O errors) and the first target is built for real, otherwise
# all targets are flagged as cached.  SystemExit is converted to an
# ExplicitExit, UserError/BuildError propagate unchanged, and any other
# exception is wrapped in a BuildError annotated with the failing node and
# the original exc_info.
def execute ( self ) : T = self . tm . trace if T : T . write ( self . trace_message ( u'Task.execute()' , self . node ) ) try : cached_targets = [ ] for t in self . targets : if not t . retrieve_from_cache ( ) : break cached_targets . append ( t ) if len ( cached_targets ) < len ( self . targets ) : for t in cached_targets : try : t . fs . unlink ( t . get_internal_path ( ) ) except ( IOError , OSError ) : pass self . targets [ 0 ] . build ( ) else : for t in cached_targets : t . cached = 1 except SystemExit : exc_value = sys . exc_info ( ) [ 1 ] raise SCons . Errors . ExplicitExit ( self . targets [ 0 ] , exc_value . code ) except SCons . Errors . UserError : raise except SCons . Errors . BuildError : raise except Exception as e : buildError = SCons . Errors . convert_to_BuildError ( e ) buildError . node = self . targets [ 0 ] buildError . exc_info = sys . exc_info ( ) raise buildError
Called to execute the task .
22,192
def executed_without_callbacks(self):
    """Mark targets as executed without invoking Node callback methods."""
    T = self.tm.trace
    if T:
        T.write(self.trace_message('Task.executed_without_callbacks()',
                                   self.node))

    for t in self.targets:
        if t.get_state() != NODE_EXECUTING:
            continue

        # Reset any side effects before marking the target done.
        for side_effect in t.side_effects:
            side_effect.set_state(NODE_NO_STATE)
        t.set_state(NODE_EXECUTED)
Called when the task has been successfully executed and the Taskmaster instance doesn t want to call the Node s callback methods .
22,193
# Post-execution hook that DOES run the Node callbacks.  For each target
# that was executing: clear its side effects, mark it executed, push it to
# the cache unless it came from the cache, then call built().  Afterwards
# every target is visited(); target info is released early unless the
# print_prepare flag or --debug=includes requires keeping it.
def executed_with_callbacks ( self ) : global print_prepare T = self . tm . trace if T : T . write ( self . trace_message ( 'Task.executed_with_callbacks()' , self . node ) ) for t in self . targets : if t . get_state ( ) == NODE_EXECUTING : for side_effect in t . side_effects : side_effect . set_state ( NODE_NO_STATE ) t . set_state ( NODE_EXECUTED ) if not t . cached : t . push_to_cache ( ) t . built ( ) t . visited ( ) if ( not print_prepare and ( not hasattr ( self , 'options' ) or not self . options . debug_includes ) ) : t . release_target_info ( ) else : t . visited ( )
Called when the task has been successfully executed and the Taskmaster instance wants to call the Node s callback methods .
22,194
def fail_stop(self):
    """Handle a failure by stopping the whole build."""
    T = self.tm.trace
    if T:
        T.write(self.trace_message('Task.failed_stop()', self.node))

    # Fail this task's targets (and whatever waits on them), then tell the
    # taskmaster to stop handing out work.
    self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
    self.tm.stop()

    # Point at the current top-level target for the final status pass.
    self.targets = [self.tm.current_top]
    self.top = 1
Explicit stop - the - build failure .
22,195
def fail_continue(self):
    """Handle a failure but let the rest of the build keep going."""
    T = self.tm.trace
    if T:
        T.write(self.trace_message('Task.failed_continue()', self.node))

    self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
Explicit continue - the - build failure .
22,196
def make_ready_all(self):
    """Unconditionally mark every target and side effect for execution."""
    T = self.tm.trace
    if T:
        T.write(self.trace_message('Task.make_ready_all()', self.node))

    # Everything is treated as out of date.
    self.out_of_date = self.targets[:]
    for t in self.targets:
        t.disambiguate().set_state(NODE_EXECUTING)
        for s in t.side_effects:
            s.disambiguate().set_state(NODE_EXECUTING)
Marks all targets in a task ready for execution .
22,197
# Mark this task's targets for execution only if at least one of them is
# out of date.  Each target is disambiguated and made ready; an up-to-date
# target is one with no builder, or one that is not always_build and whose
# is_up_to_date() holds.  EnvironmentError during the check becomes a
# BuildError.  If anything needs building, all targets and their side
# effects go to NODE_EXECUTING; otherwise targets are visited, set to
# NODE_UP_TO_DATE, and (unless print_prepare/--debug=includes forbid it)
# their target info is released early.
def make_ready_current ( self ) : global print_prepare T = self . tm . trace if T : T . write ( self . trace_message ( u'Task.make_ready_current()' , self . node ) ) self . out_of_date = [ ] needs_executing = False for t in self . targets : try : t . disambiguate ( ) . make_ready ( ) is_up_to_date = not t . has_builder ( ) or ( not t . always_build and t . is_up_to_date ( ) ) except EnvironmentError as e : raise SCons . Errors . BuildError ( node = t , errstr = e . strerror , filename = e . filename ) if not is_up_to_date : self . out_of_date . append ( t ) needs_executing = True if needs_executing : for t in self . targets : t . set_state ( NODE_EXECUTING ) for s in t . side_effects : s . disambiguate ( ) . set_state ( NODE_EXECUTING ) else : for t in self . targets : t . visited ( ) t . set_state ( NODE_UP_TO_DATE ) if ( not print_prepare and ( not hasattr ( self , 'options' ) or not self . options . debug_includes ) ) : t . release_target_info ( )
Marks all targets in a task ready for execution if any target is not current .
22,198
# Post-execution bookkeeping for a Taskmaster Task.  Removes finished
# targets from the taskmaster's pending_children set, tallies how many
# waiting-parent references each parent held on this task's targets and
# still-executing side effects (resetting those side effects to
# NODE_NO_STATE and re-queueing parents whose side-effect ref_count hit
# zero), then decrements each parent's ref_count by its tally and appends
# any parent that reaches zero back onto the candidates list.  Finally
# each target's own postprocess() hook runs.
def postprocess ( self ) : T = self . tm . trace if T : T . write ( self . trace_message ( u'Task.postprocess()' , self . node ) ) targets = set ( self . targets ) pending_children = self . tm . pending_children parents = { } for t in targets : if t . waiting_parents : if T : T . write ( self . trace_message ( u'Task.postprocess()' , t , 'removing' ) ) pending_children . discard ( t ) for p in t . waiting_parents : parents [ p ] = parents . get ( p , 0 ) + 1 for t in targets : if t . side_effects is not None : for s in t . side_effects : if s . get_state ( ) == NODE_EXECUTING : s . set_state ( NODE_NO_STATE ) for p in s . waiting_parents : parents [ p ] = parents . get ( p , 0 ) + 1 for p in s . waiting_s_e : if p . ref_count == 0 : self . tm . candidates . append ( p ) for p , subtract in parents . items ( ) : p . ref_count = p . ref_count - subtract if T : T . write ( self . trace_message ( u'Task.postprocess()' , p , 'adjusted parent ref count' ) ) if p . ref_count == 0 : self . tm . candidates . append ( p ) for t in targets : t . postprocess ( )
Post - processes a task after it s been executed .
22,199
def exception_set(self, exception=None):
    """Store *exception* (default: the in-flight one) for later re-raising.

    Also swaps exception_raise to the handler that actually raises it.
    """
    if not exception:
        exception = sys.exc_info()
    self.exception = exception
    self.exception_raise = self._exception_raise
Records an exception to be raised at the appropriate time .