idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
44,100
def hashify_files(files: list) -> dict:
    """Return mapping from file path to file hash.

    Keys are normalized to POSIX-style separators; values hash the
    original path as given.
    """
    return {fpath.replace('\\', '/'): hash_tree(fpath)
            for fpath in listify(files)}
Return mapping from file path to file hash .
44,101
def process_prop(prop_type: PT, value, build_context):
    """Return a cachable representation of the prop value given its type.

    Target-typed and file-typed props are replaced by hashes; anything
    else is returned unchanged.
    """
    if prop_type in (PT.Target, PT.TargetList):
        return hashify_targets(value, build_context)
    if prop_type in (PT.File, PT.FileList):
        return hashify_files(value)
    return value
Return a cachable representation of the prop value given its type .
44,102
def compute_json(self, build_context):
    """Compute and store a JSON serialization of this target for caching.

    Props are split into regular vs. test-only props; both serializations
    are stored on the instance (`_json`, `_test_json`).
    """
    props = {}
    test_props = {}
    for prop in self.props:
        if prop in self._prop_json_blacklist:
            continue
        sig_spec = Plugin.builders[self.builder_name].sig.get(prop)
        if sig_spec is None:
            # Prop not part of the builder signature - not serialized.
            continue
        dest = test_props if prop in self._prop_json_testlist else props
        dest[prop] = process_prop(sig_spec.type, self.props[prop],
                                  build_context)
    json_dict = dict(
        name=self.name,
        builder_name=self.builder_name,
        deps=hashify_targets(self.deps, build_context),
        props=props,
        buildenv=hashify_targets(self.buildenv, build_context),
        tags=sorted(list(self.tags)),
        flavor=build_context.conf.flavor,
    )
    json_test_dict = dict(props=test_props)
    self._json = json.dumps(json_dict, sort_keys=True, indent=4)
    self._test_json = json.dumps(json_test_dict, sort_keys=True, indent=4)
Compute and store a JSON serialization of this target for caching purposes .
44,103
def json(self, build_context) -> str:
    """Return JSON serialization of this target for caching purposes."""
    # Computed lazily and memoized on the instance.
    if self._json is None:
        self.compute_json(build_context)
    return self._json
Return JSON serialization of this target for caching purposes .
44,104
def compute_hash ( self , build_context ) : m = md5 ( ) m . update ( self . json ( build_context ) . encode ( 'utf8' ) ) self . _hash = m . hexdigest ( ) m = md5 ( ) m . update ( self . test_json ( build_context ) . encode ( 'utf8' ) ) self . _test_hash = m . hexdigest ( )
Compute and store the hash of this target for caching purposes .
44,105
def hash(self, build_context) -> str:
    """Return the hash of this target for caching purposes (lazy)."""
    if self._hash is None:
        self.compute_hash(build_context)
    return self._hash
Return the hash of this target for caching purposes .
44,106
def handle_build_cache(conf: Config, name: str, tag: str,
                       icb: ImageCachingBehavior):
    """Handle Docker image build cache.

    Return the cached local image ID when the build may be skipped, None
    when a build is required.  Raise RuntimeError when the image is not
    cached and building is not allowed by the caching behavior.
    """
    if icb.pull_if_cached or (icb.pull_if_not_cached and
                              get_cached_image_id(icb.remote_image) is None):
        try:
            pull_docker_image(icb.remote_image, conf.docker_pull_cmd)
        except CalledProcessError:
            # Best-effort pull; a still-missing image is handled below.
            pass
    local_image = '{}:{}'.format(name, tag)
    if (icb.skip_build_if_cached and
            get_cached_image_id(icb.remote_image) is not None):
        tag_docker_image(icb.remote_image, local_image)
        return get_cached_image_id(local_image)
    if ((not icb.allow_build_if_not_cached) and
            get_cached_image_id(icb.remote_image) is None):
        raise RuntimeError('No cached image for {}'.format(local_image))
    return None
Handle Docker image build cache .
44,107
def add_stream_handler(logger, stream):
    """Add a brace-style-formatted stream handler for `stream` to `logger`."""
    handler = logging.StreamHandler(stream=stream)
    handler.setFormatter(logging.Formatter(
        '{asctime} {name:24s} {levelname:8s} {message}', style='{'))
    logger.addHandler(handler)
Add a brace-style-formatted stream handler using stream to logger .
44,108
def configure_logging(conf):
    """Initialize and configure root-logger level and output streams."""
    root_logger = logging.getLogger()
    root_logger.setLevel(getattr(logging, conf.loglevel.upper()))
    if conf.logtostderr:
        add_stream_handler(root_logger, sys.stderr)
    if conf.logtostdout:
        add_stream_handler(root_logger, sys.stdout)
Initialize and configure logging .
44,109
def register_sig(self, builder_name: str, sig: list, docstring: str,
                 cachable: bool = True, attempts=1):
    """Register a builder signature & docstring for `builder_name`.

    Builds an OrderedDict mapping arg name -> ArgSpec from the declared
    `sig`, enforcing no duplicates and no non-default args after default
    ones, then appends the common injected args (deps, cachable, license,
    policies, *_params, attempts).

    Raises KeyError if a signature was already registered, SyntaxError on
    a malformed signature.
    """
    if self.sig is not None:
        raise KeyError(
            '{} already registered a signature!'.format(builder_name))
    self.sig = OrderedDict(name=ArgSpec(PropType.TargetName, Empty))
    self.docstring = docstring
    kwargs_section = False
    for arg_spec in listify(sig):
        arg_name, sig_spec = evaluate_arg_spec(arg_spec)
        if arg_name in self.sig or arg_name in INJECTED_ARGS:
            raise SyntaxError(
                "duplicate argument '{}' in function definition".format(
                    arg_name))
        self.sig[arg_name] = sig_spec
        if sig_spec.default == Empty:
            if kwargs_section:
                raise SyntaxError(
                    'non-default argument follows default argument')
            self.min_positional_args += 1
        else:
            kwargs_section = True
    # Common args injected into every builder signature.
    self.sig['deps'] = ArgSpec(PropType.TargetList, None)
    self.sig['cachable'] = ArgSpec(PropType.bool, cachable)
    self.sig['license'] = ArgSpec(PropType.StrList, None)
    self.sig['policies'] = ArgSpec(PropType.StrList, None)
    self.sig['packaging_params'] = ArgSpec(PropType.dict, None)
    self.sig['runtime_params'] = ArgSpec(PropType.dict, None)
    self.sig['build_params'] = ArgSpec(PropType.dict, None)
    # Bug fix: use the `attempts` parameter as the default instead of the
    # hard-coded 1, which silently ignored the caller's value.
    self.sig['attempts'] = ArgSpec(PropType.numeric, attempts)
Register a builder signature & docstring for builder_name .
44,110
def remove_builder(cls, builder_name: str):
    """Remove a registered builder `builder_name` (no-op if unknown)."""
    cls.builders.pop(builder_name, None)
    # Also drop any hooks that were registered for that builder.
    for hook_spec in cls.hooks.values():
        hook_spec.pop(builder_name, None)
Remove a registered builder builder_name .
44,111
def to_build_module(build_file_path: str, conf: Config) -> str:
    """Return a normalized build module name for `build_file_path`.

    The module name is the build file's directory relative to the project
    root, as a POSIX path ('' for the root module).
    """
    build_file = Path(build_file_path)
    project_root = Path(conf.project_root)
    rel_dir = build_file.resolve().relative_to(project_root).parent
    # Root-level build files yield '.', which strips to ''.
    return rel_dir.as_posix().strip('.')
Return a normalized build module name for build_file_path .
44,112
def cmd_version(unused_conf):
    """Print out version information about YABT and detected builders."""
    import pkg_resources
    print('This is {} version {}, imported from {}'.format(
        __oneliner__, __version__, __file__))
    if len(Plugin.builders) > 0:
        print('setuptools registered builders:')
        for entry_point in pkg_resources.iter_entry_points('yabt.builders'):
            print(' {0.module_name}.{0.name} (dist {0.dist})'.format(
                entry_point))
Print out version information about YABT and detected builders .
44,113
def cmd_list(unused_conf: Config):
    """Print out information on loaded builders and hooks."""
    for name, builder in sorted(Plugin.builders.items()):
        if builder.func:
            print('+- {0:16s} implemented in {1.__module__}.{1.__name__}()'
                  .format(name, builder.func))
        else:
            print('+- {0:16s} loaded with no builder function'.format(name))
        for hook_name, hook_func in sorted(Plugin.get_hooks_for_builder(name)):
            print(' +- {0} hook implemented in '
                  '{1.__module__}.{1.__name__}()'.format(
                      hook_name, hook_func))
Print out information on loaded builders and hooks .
44,114
def cmd_build(conf: Config, run_tests: bool = False):
    """Build requested targets and their dependencies."""
    build_context = BuildContext(conf)
    populate_targets_graph(build_context, conf)
    build_context.build_graph(run_tests=run_tests)
    build_context.write_artifacts_metadata()
Build requested targets and their dependencies .
44,115
def main():
    """Main ybt console script entry point - run YABT from command-line."""
    conf = init_and_get_conf()
    logger = make_logger(__name__)
    logger.info('YaBT version {}', __version__)
    handlers = {
        'build': YabtCommand(func=cmd_build, requires_project=True),
        'dot': YabtCommand(func=cmd_dot, requires_project=True),
        'test': YabtCommand(func=cmd_test, requires_project=True),
        'tree': YabtCommand(func=cmd_tree, requires_project=True),
        'version': YabtCommand(func=cmd_version, requires_project=False),
        'list-builders': YabtCommand(func=cmd_list, requires_project=False),
    }
    command = handlers[conf.cmd]
    if command.requires_project and not conf.in_yabt_project():
        fatal('Not a YABT project (or any of the parent directories): {}',
              BUILD_PROJ_FILE)
    try:
        command.func(conf)
    except Exception as ex:
        # Top-level boundary: any uncaught error terminates with a message.
        fatal('{}', ex)
Main ybt console script entry point - run YABT from command - line .
44,116
def cpp_app_builder(build_context, target):
    """Pack a C++ binary as a Docker image with its runtime dependencies.

    Exactly one of `executable` (pre-existing binary) or `main` (CppProg
    target) must be specified.
    """
    yprint(build_context.conf, 'Build CppApp', target)
    if target.props.executable and target.props.main:
        raise KeyError(
            '`main` and `executable` arguments are mutually exclusive')
    if target.props.executable:
        if target.props.executable not in target.artifacts.get(AT.app):
            target.artifacts.add(AT.app, target.props.executable)
        entrypoint = [target.props.executable]
    elif target.props.main:
        prog = build_context.targets[target.props.main]
        binary = list(prog.artifacts.get(AT.binary).keys())[0]
        entrypoint = ['/usr/src/bin/' + binary]
    else:
        raise KeyError('Must specify either `main` or `executable` argument')
    build_app_docker_and_bin(build_context, target, entrypoint=entrypoint)
Pack a C ++ binary as a Docker image with its runtime dependencies .
44,117
def make_pre_build_hook(extra_compiler_config_params):
    """Return a pre-build hook function for C++ builders.

    The hook attaches a CompilerConfig to the target and mirrors it into
    the target props (for cache-hash participation).
    """
    def pre_build_hook(build_context, target):
        target.compiler_config = CompilerConfig(
            build_context, target, extra_compiler_config_params)
        target.props._internal_dict_['compiler_config'] = (
            target.compiler_config.as_dict())
    return pre_build_hook
Return a pre - build hook function for C ++ builders .
44,118
def compile_cc(build_context, compiler_config, buildenv, sources,
               workspace_dir, buildenv_workspace, cmd_env):
    """Compile C++ source files in a buildenv image.

    Return the list of generated object files as project-relative paths.
    """
    objects = []
    for src in sources:
        obj_rel_path = '{}.o'.format(splitext(src)[0])
        obj_file = join(buildenv_workspace, obj_rel_path)
        include_paths = [buildenv_workspace] + compiler_config.include_path
        compile_cmd = (
            [compiler_config.compiler, '-o', obj_file, '-c']
            + compiler_config.compile_flags
            + ['-I{}'.format(path) for path in include_paths]
            + [join(buildenv_workspace, src)])
        build_context.run_in_buildenv(buildenv, compile_cmd, cmd_env)
        objects.append(join(
            relpath(workspace_dir, build_context.conf.project_root),
            obj_rel_path))
    return objects
Compile list of C ++ source files in a buildenv image and return list of generated object files .
44,119
def get_source_files(target, build_context) -> list:
    """Return list of source files for `target`.

    Includes the target's own sources plus generated C++ sources from its
    proto dependencies.
    """
    all_sources = list(target.props.sources)
    for proto_dep_name in target.props.protos:
        proto_dep = build_context.targets[proto_dep_name]
        all_sources.extend(proto_dep.artifacts.get(AT.gen_cc).keys())
    return all_sources
Return list of source files for target .
44,120
def build_cpp(build_context, target, compiler_config, workspace_dir):
    """Compile and link a C++ binary for `target`.

    Rebuilds the workspace from scratch, compiles all sources, links the
    binary, and registers it as a binary artifact.
    """
    rmtree(workspace_dir)
    binary = join(*split(target.name))
    objects = link_cpp_artifacts(build_context, target, workspace_dir, True)
    buildenv_workspace = build_context.conf.host_to_buildenv_path(
        workspace_dir)
    objects.extend(compile_cc(
        build_context, compiler_config, target.props.in_buildenv,
        get_source_files(target, build_context), workspace_dir,
        buildenv_workspace, target.props.cmd_env))
    bin_file = join(buildenv_workspace, binary)
    link_cmd = ([compiler_config.linker, '-o', bin_file]
                + objects + compiler_config.link_flags)
    build_context.run_in_buildenv(
        target.props.in_buildenv, link_cmd, target.props.cmd_env)
    target.artifacts.add(
        AT.binary,
        relpath(join(workspace_dir, binary), build_context.conf.project_root),
        binary)
Compile and link a C ++ binary for target .
44,121
def cpp_prog_builder(build_context, target):
    """Build a C++ binary executable."""
    yprint(build_context.conf, 'Build CppProg', target)
    workspace_dir = build_context.get_workspace('CppProg', target.name)
    build_cpp(build_context, target, target.compiler_config, workspace_dir)
Build a C ++ binary executable
44,122
def cpp_lib_builder(build_context, target):
    """Build C++ object files for a library target."""
    yprint(build_context.conf, 'Build CppLib', target)
    workspace_dir = build_context.get_workspace('CppLib', target.name)
    workspace_src_dir = join(workspace_dir, 'src')
    rmtree(workspace_src_dir)
    link_cpp_artifacts(build_context, target, workspace_src_dir, False)
    buildenv_workspace = build_context.conf.host_to_buildenv_path(
        workspace_src_dir)
    objects = compile_cc(
        build_context, target.compiler_config, target.props.in_buildenv,
        get_source_files(target, build_context), workspace_src_dir,
        buildenv_workspace, target.props.cmd_env)
    for obj_file in objects:
        target.artifacts.add(AT.object, obj_file)
Build C ++ object files
44,123
def standard_licenses_only(build_context, target) -> str:
    """A policy function for allowing only known licenses.

    Return an error message for the first unknown license found, or None
    when all licenses are known.
    """
    for license_name in target.props.license:
        if license_name not in KNOWN_LICENSES:
            return 'Unknown license: {}'.format(license_name)
    return None
A policy function for allowing specifying only known licenses .
44,124
def whitelist_licenses_policy(policy_name: str, allowed_licenses: set):
    """A policy factory for making license-based whitelist policies.

    The returned policy applies only to targets that list `policy_name`
    in their `policies` prop, and reports any license (of the target or
    any of its deps) that is not in `allowed_licenses`.
    """
    def policy_func(build_context, target):
        if policy_name in target.props.policies:
            licenses = set(target.props.license)
            for dep in build_context.generate_all_deps(target):
                licenses.update(dep.props.license)
            licenses.difference_update(allowed_licenses)
            if licenses:
                return 'Invalid licenses for {} policy: {}'.format(
                    policy_name, ', '.join(sorted(licenses)))
        return None
    policy_func.__name__ = 'whitelist_{}_licenses'.format(policy_name)
    return policy_func
A policy factory for making license - based whitelist policies .
44,125
def find_project_config_file(project_root: str) -> str:
    """Return absolute path to project-specific config file if it exists.

    Return None when there is no project root or no config file in it.
    """
    if project_root:
        project_config_file = os.path.join(project_root, YCONFIG_FILE)
        if os.path.isfile(project_config_file):
            return project_config_file
    return None
Return absolute path to project - specific config file if it exists .
44,126
def get_user_settings_module(project_root: str):
    """Return project-specific user settings module if it exists (else None)."""
    if project_root:
        project_settings_file = os.path.join(project_root, YSETTINGS_FILE)
        if os.path.isfile(project_settings_file):
            loader = SourceFileLoader('settings', project_settings_file)
            # NOTE(review): load_module() is deprecated in modern
            # importlib; kept to preserve behavior.
            return loader.load_module()
    return None
Return project - specific user settings module if it exists .
44,127
def call_user_func(settings_module, func_name, *args, **kwargs):
    """Call a user-supplied settings function and clean it up afterwards.

    Return the function's result, or None when the module or function is
    absent.  The attribute is removed even if the call raises, so each
    settings function is one-shot.
    """
    if not settings_module or not hasattr(settings_module, func_name):
        return None
    func = getattr(settings_module, func_name)
    try:
        return func(*args, **kwargs)
    finally:
        delattr(settings_module, func_name)
Call a user - supplied settings function and clean it up afterwards .
44,128
def get_build_flavor(settings_module, args):
    """Update the flavor arg based on the settings API.

    A flavor given on the CLI must be one of the known flavors; otherwise
    fall back to the settings-provided default flavor (also validated).
    """
    known_flavors = listify(call_user_func(settings_module, 'known_flavors'))
    if args.flavor:
        if args.flavor not in known_flavors:
            raise ValueError('Unknown build flavor: {}'.format(args.flavor))
    else:
        args.flavor = call_user_func(settings_module, 'default_flavor')
        if args.flavor and args.flavor not in known_flavors:
            raise ValueError(
                'Unknown default build flavor: {}'.format(args.flavor))
Update the flavor arg based on the settings API
44,129
def init_and_get_conf(argv: list = None) -> Config:
    """Initialize a YABT CLI environment and return a Config instance."""
    colorama.init()
    work_dir = os.path.abspath(os.curdir)
    project_root = search_for_parent_dir(
        work_dir, with_files=set([BUILD_PROJ_FILE]))
    parser = make_parser(find_project_config_file(project_root))
    settings_module = get_user_settings_module(project_root)
    # Let project settings extend the CLI before parsing.
    call_user_func(settings_module, 'extend_cli', parser)
    argcomplete.autocomplete(parser)
    args = parser.parse(argv)
    get_build_flavor(settings_module, args)
    config = Config(args, project_root, work_dir, settings_module)
    config.common_conf = call_user_func(
        config.settings, 'get_common_config', config, args)
    config.flavor_conf = call_user_func(
        config.settings, 'get_flavored_config', config, args)
    call_user_func(config.settings, 'extend_config', config, args)
    if not args.no_policies:
        config.policies = listify(
            call_user_func(config.settings, 'get_policies', config))
    return config
Initialize a YABT CLI environment and return a Config instance .
44,130
def stable_reverse_topological_sort(graph):
    """Yield graph nodes in stable reverse topological sort order.

    Iterative DFS visiting nodes and neighbors in sorted order for
    stability.  Raises NetworkXError on undirected graphs and
    NetworkXUnfeasible when a cycle is found.
    """
    if not graph.is_directed():
        raise networkx.NetworkXError(
            'Topological sort not defined on undirected graphs.')
    seen = set()
    explored = set()
    for v in sorted(graph.nodes()):
        if v in explored:
            continue
        fringe = [v]
        while fringe:
            w = fringe[-1]
            if w in explored:
                fringe.pop()
                continue
            seen.add(w)
            # Gather unexplored successors; hitting a seen-but-unexplored
            # node means w is on a cycle.
            new_nodes = []
            for n in sorted(graph[w]):
                if n not in explored:
                    if n in seen:
                        raise networkx.NetworkXUnfeasible(
                            'Graph contains a cycle.')
                    new_nodes.append(n)
            if new_nodes:
                fringe.extend(new_nodes)
            else:
                # All successors explored - emit w (post-order).
                explored.add(w)
                yield w
                fringe.pop()
Return a list of nodes in topological sort order .
44,131
def raise_unresolved_targets(build_context, conf, unknown_seeds, seed_refs):
    """Raise ValueError describing unresolved targets from graph parsing."""

    def format_target(target_name):
        build_module = split_build_module(target_name)
        return '{} (in {})'.format(
            target_name, conf.get_build_file_path(build_module))

    def format_unresolved(seed):
        if seed not in seed_refs:
            return seed
        seed_ref = seed_refs[seed]
        reasons = []
        if seed_ref.on_cli:
            reasons.append('seen on command line')
        if seed_ref.from_default:
            # NOTE(review): `conf.get_project_build_file` is not called
            # here - if it is a method (not a property) this formats its
            # repr; confirm intended.
            reasons.append('specified as default target in {}'.format(
                conf.get_project_build_file))
        if seed_ref.dep_of:
            reasons.append('dependency of ' + ', '.join(
                format_target(name) for name in sorted(seed_ref.dep_of)))
        if seed_ref.buildenv_of:
            reasons.append('buildenv of ' + ', '.join(
                format_target(name) for name in sorted(seed_ref.buildenv_of)))
        return '{} - {}'.format(seed, ', '.join(reasons))

    unresolved_str = '\n'.join(
        format_unresolved(name) for name in sorted(unknown_seeds))
    num_target_str = '{} target'.format(len(unknown_seeds))
    if len(unknown_seeds) > 1:
        num_target_str += 's'
    raise ValueError('Could not resolve {}:\n{}'.format(
        num_target_str, unresolved_str))
Raise error about unresolved targets during graph parsing .
44,132
def register_scm_provider(scm_name: str):
    """Return a decorator for registering a SCM provider named `scm_name`."""
    def register_decorator(scm_class: SourceControl):
        if scm_name in ScmManager.providers:
            raise KeyError('{} already registered!'.format(scm_name))
        ScmManager.providers[scm_name] = scm_class
        SourceControl.register(scm_class)
        logger.debug('Registered {0} SCM from {1.__module__}.{1.__name__}',
                     scm_name, scm_class)
        return scm_class
    return register_decorator
Return a decorator for registering a SCM provider named scm_name .
44,133
def get_provider(cls, scm_name: str, conf) -> SourceControl:
    """Load and return named SCM provider instance.

    Raise KeyError for an unknown SCM identifier.
    """
    for entry_point in pkg_resources.iter_entry_points('yabt.scm', scm_name):
        entry_point.load()
        logger.debug('Loaded SCM provider {0.name} from {0.module_name} '
                     '(dist {0.dist})', entry_point)
    logger.debug('Loaded {} SCM providers', len(cls.providers))
    if scm_name not in cls.providers:
        raise KeyError('Unknown SCM identifier {}'.format(scm_name))
    return cls.providers[scm_name](conf)
Load and return named SCM provider instance .
44,134
def write_dot(build_context, conf: Config, out_f):
    """Write build graph in dot format to `out_f` file-like object."""
    not_buildenv_targets = get_not_buildenv_targets(build_context)
    prebuilt_targets = get_prebuilt_targets(build_context)
    out_f.write('strict digraph {\n')
    for node in build_context.target_graph.nodes:
        if conf.show_buildenv_deps or node in not_buildenv_targets:
            cached = node in prebuilt_targets
            fillcolor = 'fillcolor="grey",style=filled' if cached else ''
            color = TARGETS_COLORS.get(
                build_context.targets[node].builder_name, 'black')
            out_f.write(' "{}" [color="{}",{}];\n'.format(
                node, color, fillcolor))
    out_f.writelines(
        ' "{}" -> "{}";\n'.format(u, v)
        for u, v in build_context.target_graph.edges
        if conf.show_buildenv_deps or (u in not_buildenv_targets and
                                       v in not_buildenv_targets))
    out_f.write('}\n\n')
Write build graph in dot format to out_f file - like object .
44,135
def get_workspace(self, *parts) -> str:
    """Return a path to a private workspace dir.

    Create sub-tree of dirs using sanitized strings from `parts` inside
    the workspace and return full path to innermost directory.
    """
    workspace_dir = os.path.join(
        self.conf.get_workspace_path(),
        *(get_safe_path(part) for part in parts))
    if not os.path.isdir(workspace_dir):
        # exist_ok guards against concurrent creation.
        os.makedirs(workspace_dir, exist_ok=True)
    return workspace_dir
Return a path to a private workspace dir . Create sub - tree of dirs using strings from parts inside workspace and return full path to innermost directory .
44,136
def get_bin_dir(self, build_module: str) -> str:
    """Return a path to the binaries dir for a build module.

    Create missing sub-dirs as needed and return the full path.
    """
    bin_dir = os.path.join(self.conf.get_bin_path(), build_module)
    if not os.path.isdir(bin_dir):
        # exist_ok guards against concurrent creation.
        os.makedirs(bin_dir, exist_ok=True)
    return bin_dir
Return a path to the binaries dir for a build module dir . Create sub - tree of missing dirs as needed and return full path to innermost directory .
44,137
def walk_target_deps_topological_order(self, target: Target):
    """Generate all dependencies of `target` in topological sort order."""
    all_deps = get_descendants(self.target_graph, target.name)
    # Filter the full-graph topological order down to this target's deps.
    for dep_name in topological_sort(self.target_graph):
        if dep_name in all_deps:
            yield self.targets[dep_name]
Generate all dependencies of target by topological sort order .
44,138
def generate_direct_deps(self, target: Target):
    """Generate only direct dependencies of `target`, sorted by name."""
    for dep_name in sorted(target.deps):
        yield self.targets[dep_name]
Generate only direct dependencies of target .
44,139
def register_target(self, target: Target):
    """Register a target instance in this build context.

    Raise NameError when a target with the same name already exists.
    """
    if target.name in self.targets:
        first = self.targets[target.name]
        raise NameError(
            'Target with name "{0.name}" ({0.builder_name} from module '
            '"{1}") already exists - defined first as '
            '{2.builder_name} in module "{3}"'.format(
                target, split_build_module(target.name),
                first, split_build_module(first.name)))
    self.targets[target.name] = target
    self.targets_by_module[split_build_module(target.name)].add(target.name)
Register a target instance in this build context .
44,140
def get_target_extraction_context(self, build_file_path: str) -> dict:
    """Return a build file parser target extraction context.

    Maps each registered builder name to an extractor bound to the given
    build file and this context.
    """
    return {name: extractor(name, builder, build_file_path, self)
            for name, builder in Plugin.builders.items()}
Return a build file parser target extraction context .
44,141
def get_buildenv_graph(self):
    """Return the subgraph induced by buildenv nodes and their descendants."""
    buildenvs = set(target.buildenv
                    for target in self.targets.values() if target.buildenv)
    # Union of all buildenv nodes with everything reachable from them.
    induced_nodes = reduce(
        lambda acc, descendants: acc | set(descendants),
        (get_descendants(self.target_graph, buildenv)
         for buildenv in buildenvs),
        buildenvs)
    return nx.DiGraph(self.target_graph.subgraph(induced_nodes))
Return a graph induced by buildenv nodes
44,142
def ready_nodes_iter(self, graph_copy):
    """Generate ready-to-build targets from `graph_copy` (consumed in place).

    A node is "ready" when it has no remaining successors in the graph.
    Each yielded target gets `done` / `retry` / `fail` callbacks attached
    that update the graph and the ready queue (intended to be invoked from
    builder workers; coordination is via events).
    """

    def is_ready(target_name):
        # Ready == no remaining successors (unbuilt dependencies).
        try:
            next(graph_copy.successors(target_name))
        except StopIteration:
            return True
        return False

    ready_nodes = deque(sorted(
        target_name for target_name in graph_copy.nodes
        if is_ready(target_name)))
    produced_event = threading.Event()
    failed_event = threading.Event()

    def make_done_callback(target: Target):
        def done_notifier():
            if graph_copy.has_node(target.name):
                affected_nodes = list(sorted(
                    graph_copy.predecessors(target.name)))
                graph_copy.remove_node(target.name)
                ready_nodes.extend(
                    target_name for target_name in affected_nodes
                    if is_ready(target_name))
                produced_event.set()
        return done_notifier

    def make_retry_callback(target: Target):
        def retry_notifier():
            if graph_copy.has_node(target.name):
                ready_nodes.append(target.name)
                produced_event.set()
        return retry_notifier

    def make_fail_callback(target: Target):
        def fail_notifier(ex):
            if isinstance(ex, CalledProcessError):
                sys.stdout.write(ex.stdout.decode('utf-8'))
                sys.stderr.write(ex.stderr.decode('utf-8'))
            if graph_copy.has_node(target.name):
                self.failed_nodes[target.name] = ex
                affected_nodes = get_ancestors(graph_copy, target.name)
                graph_copy.remove_node(target.name)
                # Skip everything that (transitively) depends on the
                # failed target.
                for affected_node in affected_nodes:
                    if affected_node in self.skipped_nodes:
                        continue
                    if graph_copy.has_node(affected_node):
                        self.skipped_nodes.append(affected_node)
                        graph_copy.remove_node(affected_node)
            if self.conf.continue_after_fail:
                logger.info('Failed target: {} due to error: {}',
                            target.name, ex)
                produced_event.set()
            else:
                failed_event.set()
                fatal('`{}\': {}', target.name, ex)
        return fail_notifier

    while True:
        while len(ready_nodes) == 0:
            if graph_copy.order() == 0:
                return
            if failed_event.is_set():
                return
            # Wait for a callback to produce new ready nodes; the timeout
            # lets us periodically re-check emptiness / failure.
            produced_event.wait(0.5)
            produced_event.clear()
        next_node = ready_nodes.popleft()
        node = self.targets[next_node]
        node.done = make_done_callback(node)
        node.retry = make_retry_callback(node)
        node.fail = make_fail_callback(node)
        yield node
Generate ready targets from the graph graph_copy .
44,143
def run_in_buildenv(self, buildenv_target_name: str, cmd: list,
                    cmd_env: dict = None, work_dir: str = None,
                    auto_uid: bool = True, runtime: str = None, **kwargs):
    """Run a command in a named BuildEnv Docker image.

    Extra `kwargs` are forwarded to `subprocess.run`; stdout/stderr
    default to PIPE and are echoed to this process's streams.
    Returns the CompletedProcess result.
    """
    buildenv_target = self.targets[buildenv_target_name]
    # Only allocate a TTY when no stream redirection was requested.
    redirection = any(
        stream_key in kwargs
        for stream_key in ('stdin', 'stdout', 'stderr', 'input'))
    docker_run = ['docker', 'run']
    if not redirection:
        docker_run.append('-t')
    project_vol = (self.conf.docker_volume if self.conf.docker_volume
                   else self.conf.project_root)
    container_work_dir = PurePath('/project')
    if work_dir:
        container_work_dir /= work_dir
    if runtime:
        docker_run.extend(['--runtime', runtime])
    docker_run.extend([
        '--rm',
        '-v', project_vol + ':/project',
        '-w', container_work_dir.as_posix(),
    ])
    if cmd_env:
        for key, value in cmd_env.items():
            docker_run.extend(['-e', '{}={}'.format(key, value)])
    if platform.system() == 'Linux' and auto_uid:
        # Run as the invoking user so generated files aren't root-owned.
        docker_run.extend([
            '-u', '{}:{}'.format(os.getuid(), os.getgid()),
            '-v', '/etc/shadow:/etc/shadow:ro',
            '-v', '/etc/group:/etc/group:ro',
            '-v', '/etc/passwd:/etc/passwd:ro',
            '-v', '/etc/sudoers:/etc/sudoers:ro',
        ])
    docker_run.append(format_qualified_image_name(buildenv_target))
    docker_run.extend(cmd)
    logger.info('Running command in build env "{}" using command {}',
                buildenv_target_name, docker_run)
    if 'stderr' not in kwargs:
        kwargs['stderr'] = PIPE
    if 'stdout' not in kwargs:
        kwargs['stdout'] = PIPE
    result = run(docker_run, check=True, **kwargs)
    if kwargs['stdout'] is PIPE:
        try:
            sys.stdout.write(result.stdout.decode('utf-8'))
        except UnicodeEncodeError as e:
            sys.stderr.write(
                'tried writing the stdout of {},\n but it '
                'has a problematic character:\n {}\n'
                'hex dump of stdout:\n{}\n'.format(
                    docker_run, str(e),
                    codecs.encode(result.stdout, 'hex').decode('utf8')))
    if kwargs['stderr'] is PIPE:
        try:
            sys.stderr.write(result.stderr.decode('utf-8'))
        except UnicodeEncodeError as e:
            sys.stderr.write(
                'tried writing the stderr of {},\n but it '
                'has a problematic character:\n {}\n'
                'hex dump of stderr:\n{}\n'.format(
                    docker_run, str(e),
                    codecs.encode(result.stderr, 'hex').decode('utf8')))
    return result
Run a command in a named BuildEnv Docker image .
44,144
def build_target(self, target: Target):
    """Invoke the builder function for a target (if one is registered)."""
    builder = Plugin.builders[target.builder_name]
    if builder.func:
        logger.debug('About to invoke the {} builder function for {}',
                     target.builder_name, target.name)
        builder.func(self, target)
    else:
        logger.debug('Skipping {} builder function for target {} (no '
                     'function registered)', target.builder_name, target)
Invoke the builder function for a target .
44,145
def register_target_artifact_metadata(self, target, metadata: dict):
    """Register the artifact metadata dictionary for a built target.

    `target` is a Target instance (the previous `str` annotation was
    wrong - the body reads `target.name`).  Guarded by the context lock
    since builders may run concurrently.
    """
    with self.context_lock:
        self.artifacts_metadata[target.name] = metadata
Register the artifact metadata dictionary for a built target .
44,146
def write_artifacts_metadata(self):
    """Write a JSON file with all built targets' artifact metadata,
    if such an output file is specified in the config."""
    if self.conf.artifacts_metadata_file:
        # Fix: use a brace-style placeholder - this project's logger
        # formats lazy args with str.format-style '{}' (as every other
        # logger call here does), not %-style, so '%s' would be emitted
        # literally.
        logger.info('Writing artifacts metadata to file "{}"',
                    self.conf.artifacts_metadata_file)
        with open(self.conf.artifacts_metadata_file, 'w') as fp:
            json.dump(self.artifacts_metadata, fp)
Write out a JSON file with all built targets artifact metadata if such output file is specified .
44,147
def get_build_file_path(self, build_module) -> str:
    """Return a full path to the build file of `build_module`."""
    project_root = Path(self.project_root)
    build_module = norm_proj_path(build_module, '')
    # The project root module uses the project build file name.
    build_file = (BUILD_PROJ_FILE if '' == build_module
                  else self.build_file_name)
    return str(project_root / build_module / build_file)
Return a full path to the build file of build_module .
44,148
def guess_uri_type(uri: str, hint: str = None):
    """Return a guess for the URI type based on the URI string `uri`.

    An explicit `hint` always wins.  Guesses: 'git', 'archive',
    'single' (plain downloadable file), or 'local'.
    """
    if hint:
        return hint
    parsed_uri = urlparse(uri.lower())
    if parsed_uri.path.endswith('.git'):
        return 'git'
    if parsed_uri.scheme in ('http', 'https'):
        ext = splitext(parsed_uri.path)[-1]
        return 'archive' if ext in KNOWN_ARCHIVES else 'single'
    return 'local'
Return a guess for the URI type based on the URI string uri .
44,149
def git_handler(unused_build_context, target, fetch, package_dir, tar):
    """Handle remote Git repository URI: clone (or reuse) and add to tar."""
    target_name = split_name(target.name)
    repo_dir = join(package_dir, fetch.name) if fetch.name else package_dir
    try:
        repo = git.Repo(repo_dir)
    except (InvalidGitRepositoryError, NoSuchPathError):
        # No usable repo at repo_dir yet - clone it fresh.
        repo = git.Repo.clone_from(fetch.uri, repo_dir)
    assert repo.working_tree_dir == repo_dir
    tar.add(package_dir, arcname=target_name, filter=gitfilter)
Handle remote Git repository URI .
44,150
def fetch_url(url, dest, parent_to_remove_before_fetch):
    """Fetch a file from `url` into `dest`.

    The given parent directory is wiped and recreated first, then the
    response is streamed to disk in 32 KB chunks.
    """
    logger.debug('Downloading file {} from {}', dest, url)
    try:
        shutil.rmtree(parent_to_remove_before_fetch)
    except FileNotFoundError:
        pass
    os.makedirs(parent_to_remove_before_fetch)
    resp = requests.get(url, stream=True)
    with open(dest, 'wb') as fetch_file:
        for chunk in resp.iter_content(chunk_size=32 * 1024):
            fetch_file.write(chunk)
Helper function to fetch a file from a URL .
44,151
def archive_handler(unused_build_context, target, fetch, package_dir, tar):
    """Handle remote downloadable archive URI: download, extract, add to tar."""
    package_dest = join(package_dir, basename(urlparse(fetch.uri).path))
    package_content_dir = join(package_dir, 'content')
    extract_dir = (join(package_content_dir, fetch.name)
                   if fetch.name else package_content_dir)
    fetch_url(fetch.uri, package_dest, package_dir)
    ext = splitext(package_dest)[-1].lower()
    if ext in ('.gz', '.bz2', '.tgz'):
        with tarfile.open(package_dest, 'r:*') as src_tar:
            src_tar.extractall(extract_dir)
    elif ext in ('.zip',):
        with ZipFile(package_dest, 'r') as zipf:
            zipf.extractall(extract_dir)
    else:
        raise ValueError('Unsupported extension {}'.format(ext))
    tar.add(package_content_dir, arcname=split_name(target.name))
Handle remote downloadable archive URI .
44,152
def fetch_file_handler(unused_build_context, target, fetch, package_dir, tar):
    """Handle remote downloadable file URI: download it and add to tar."""
    dl_dir = join(package_dir, fetch.name) if fetch.name else package_dir
    fetch_url(fetch.uri,
              join(dl_dir, basename(urlparse(fetch.uri).path)),
              dl_dir)
    tar.add(package_dir, arcname=split_name(target.name))
Handle remote downloadable file URI .
44,153
def get_installer_desc(build_context, target) -> tuple:
    """Return (target_name, script_name, package_tarball) tuple for `target`."""
    workspace_dir = build_context.get_workspace(
        'CustomInstaller', target.name)
    target_name = split_name(target.name)
    script_name = basename(target.props.script)
    package_tarball = '{}.tar.gz'.format(join(workspace_dir, target_name))
    return target_name, script_name, package_tarball
Return a target_name script_name package_tarball tuple for target
44,154
def get_prebuilt_targets(build_context):
    """Return the set of target names already contained in cached base images.

    A dep is considered "prebuilt" when some cached image contains it and
    no not-yet-cached image still requires it to be built.
    """
    logger.info('Scanning for cached base images')
    contained = set()
    required = set()
    descendants = CachedDescendants(build_context.target_graph)
    for target_name, target in build_context.targets.items():
        if 'image_caching_behavior' not in target.props:
            continue
        image_name = get_image_name(target)
        image_tag = target.props.image_tag
        icb = ImageCachingBehavior(image_name, image_tag,
                                   target.props.image_caching_behavior)
        target.image_id = handle_build_cache(
            build_context.conf, image_name, image_tag, icb)
        image_deps = descendants.get(target_name)
        if target.image_id:
            # Image restored from cache - everything beneath it is prebuilt.
            contained.update(image_deps)
            contained.add(target.name)
        else:
            # Image must be built - deps beyond its base image are required.
            base_deps = descendants.get(target.props.base_image)
            required.update(image_deps - base_deps)
    return contained - required
Return set of target names that are contained within cached base images
44,155
def write_summary(summary: dict, cache_dir: str):
    """Write *summary* as summary.json under *cache_dir*.

    Stamps the current time into the 'accessed' key (mutates *summary*).
    """
    summary['accessed'] = time()
    summary_path = join(cache_dir, 'summary.json')
    with open(summary_path, 'w') as summary_file:
        json.dump(summary, summary_file, indent=4, sort_keys=True)
Write the summary JSON to cache_dir .
44,156
def copy_artifact(src_path: str, artifact_hash: str, conf: Config):
    """Copy the artifact at *src_path* into the artifacts cache dir, keyed
    by *artifact_hash*.

    No-op if an artifact with that hash is already cached.  Directory
    artifacts are copied recursively - plain ``shutil.copy`` raises on a
    directory, even though the cache lookups elsewhere explicitly treat
    cached artifacts as possibly being directories.
    """
    cache_dir = conf.get_artifacts_cache_dir()
    if not isdir(cache_dir):
        makedirs(cache_dir)
    cached_artifact_path = join(cache_dir, artifact_hash)
    if isfile(cached_artifact_path) or isdir(cached_artifact_path):
        logger.debug('Skipping copy of existing cached artifact {} -> {}',
                     src_path, cached_artifact_path)
        return
    abs_src_path = join(conf.project_root, src_path)
    logger.debug('Caching artifact {} under {}',
                 abs_src_path, cached_artifact_path)
    if isdir(abs_src_path):
        # BUGFIX: directory artifacts need a recursive copy.
        shutil.copytree(abs_src_path, cached_artifact_path)
    else:
        shutil.copy(abs_src_path, cached_artifact_path)
Copy the artifact at src_path with hash artifact_hash to artifacts cache dir .
44,157
def restore_artifact(src_path: str, artifact_hash: str, conf: Config):
    """Restore the artifact whose hash is *artifact_hash* to *src_path*.

    Returns True when the artifact was restored (or already matches),
    False when the cache holds no valid entry.  A cached entry whose
    actual hash no longer matches its name is discarded as corrupt.
    """
    cache_dir = conf.get_artifacts_cache_dir()
    if not isdir(cache_dir):
        return False
    cached_artifact_path = join(cache_dir, artifact_hash)
    if not (isfile(cached_artifact_path) or isdir(cached_artifact_path)):
        logger.debug('No cached artifact for {} with hash {}',
                     src_path, artifact_hash)
        return False
    actual_hash = hash_tree(cached_artifact_path)
    if actual_hash != artifact_hash:
        # Cache entry is corrupt or stale - drop it and report a miss.
        logger.warning('Cached artifact {} expected hash {} != actual hash {}',
                       src_path, artifact_hash, actual_hash)
        rmnode(cached_artifact_path)
        return False
    abs_src_path = join(conf.project_root, src_path)
    if isfile(abs_src_path) or isdir(abs_src_path):
        # BUGFIX: hash the absolute path (previously hashed the
        # project-relative one, which broke when cwd != project_root).
        existing_hash = hash_tree(abs_src_path)
        if existing_hash == artifact_hash:
            logger.debug('Existing artifact {} matches cached hash {}',
                         src_path, artifact_hash)
            return True
        logger.debug('Replacing existing artifact {} with cached one',
                     src_path)
        rmnode(abs_src_path)
    logger.debug('Restoring cached artifact {} to {}',
                 artifact_hash, src_path)
    # BUGFIX: directory artifacts need copytree; shutil.copy raises on dirs.
    if isdir(cached_artifact_path):
        shutil.copytree(cached_artifact_path, abs_src_path)
    else:
        shutil.copy(cached_artifact_path, abs_src_path)
    return True
Restore the artifact whose hash is artifact_hash to src_path .
44,158
def save_target_in_cache(target: Target, build_context):
    """Save target metadata and artifacts to the build cache for reuse.

    Writes target.json, artifacts.json and summary.json into the target's
    cache dir, and copies cacheable artifacts into the shared artifact
    cache keyed by content hash.
    """
    cache_dir = build_context.conf.get_cache_dir(target, build_context)
    # Start from a clean cache dir for this target.
    if isdir(cache_dir):
        rmtree(cache_dir)
    makedirs(cache_dir)
    logger.debug('Saving target metadata in cache under {}', cache_dir)
    with open(join(cache_dir, 'target.json'), 'w') as meta_file:
        meta_file.write(target.json(build_context))
    artifacts = target.artifacts.get_all()
    artifact_hashes = {}
    for artifact_type, artifact_map in artifacts.items():
        # Docker images are cached elsewhere (registry), not as files here.
        if artifact_type in (AT.docker_image,):
            continue
        for dst_path, src_path in artifact_map.items():
            artifact_hashes[dst_path] = hash_tree(src_path)
            if artifact_type not in _NO_CACHE_TYPES:
                copy_artifact(src_path, artifact_hashes[dst_path],
                              build_context.conf)
    # Describe every artifact (including non-cached types) with its hash.
    artifacts_desc = {
        artifact_type.name: [
            {'dst': dst_path, 'src': src_path,
             'hash': artifact_hashes.get(dst_path)}
            for dst_path, src_path in artifact_map.items()]
        for artifact_type, artifact_map in artifacts.items()}
    with open(join(cache_dir, 'artifacts.json'), 'w') as artifacts_meta_file:
        artifacts_meta_file.write(
            json.dumps(artifacts_desc, indent=4, sort_keys=True))
    summary = dict(target.summary)
    summary['name'] = target.name
    summary['artifacts_hash'] = hash_tree(join(cache_dir, 'artifacts.json'))
    # Preserve the original creation time across repeated saves.
    if summary.get('created') is None:
        summary['created'] = time()
    write_summary(summary, cache_dir)
Save target to build cache for future reuse .
44,159
def get(self, key):
    """Return (and memoize) the set of descendants of node *key*."""
    try:
        return self[key]
    except KeyError:
        result = set(get_descendants(self._target_graph, key))
        self[key] = result
        return result
Return set of descendants of node named key in target_graph .
44,160
def fatal(msg, *args, **kwargs):
    """Log the current exception (if any) and exit with a red error message.

    The message is formatted with *args* and **kwargs*.
    """
    exc_str = format_exc()
    # 'NoneType: None' is what format_exc() yields outside an except block.
    if exc_str.strip() != 'NoneType: None':
        # Reuse the already-captured traceback instead of re-calling
        # format_exc() (the original called it twice).
        logger.info('{}', exc_str)
    fatal_noexc(msg, *args, **kwargs)
Print a red msg to STDERR and exit. When used in the context of an exception, also prints out the exception. The message is formatted with args & kwargs.
44,161
def fatal_noexc(msg, *args, **kwargs):
    """Print a red, formatted *msg* to STDERR and exit with status 1."""
    formatted = msg.format(*args, **kwargs)
    print(Fore.RED + 'Fatal: ' + formatted + Style.RESET_ALL,
          file=sys.stderr)
    sys.exit(1)
Print a red msg to STDERR and exit .
44,162
def rmnode(path: str):
    """Forcibly remove the file or directory tree at *path*.

    Silently does nothing when *path* does not exist.
    """
    if isdir(path):
        rmtree(path)
        return
    if isfile(path):
        os.remove(path)
Forcibly remove the file or directory tree at path. Fails silently if it doesn't exist.
44,163
def link_files(files: set, workspace_src_dir: str, common_parent: str, conf):
    """Link *files* into *workspace_src_dir*, rebased on *common_parent*.

    Returns the number of linked nodes.  Raises ValueError when
    *common_parent* is not actually a common parent of all files.
    (Removed the unused local `norm_dir` from the original.)
    """
    base_dir = ''
    if common_parent:
        common_parent = normpath(common_parent)
        base_dir = commonpath(list(files) + [common_parent])
        if base_dir != common_parent:
            raise ValueError('{} is not the common parent of all target '
                             'sources and data'.format(common_parent))
        logger.debug('Rebasing files in image relative to common parent dir {}',
                     base_dir)
    num_linked = 0
    for src in files:
        abs_src = join(conf.project_root, src)
        abs_dest = join(conf.project_root, workspace_src_dir,
                        relpath(src, base_dir))
        link_node(abs_src, abs_dest, conf.builders_workspace_dir in src)
        num_linked += 1
    return num_linked
Sync the list of files and directories in files to destination directory specified by workspace_src_dir .
44,164
def norm_proj_path(path, build_module):
    """Return a normalized project path for *path* observed in *build_module*.

    Paths starting with '//' are relative to the project root; other
    relative paths are relative to *build_module*.  Raises ValueError for
    absolute paths and for paths escaping the project sandbox.
    """
    if path == '//':
        return ''
    if path.startswith('//'):
        norm = normpath(path[2:])
        if norm[0] in ('.', '/', '\\'):
            raise ValueError("Invalid path: `{}'".format(path))
        return norm
    if path.startswith('/'):
        raise ValueError("Invalid path: `{}' - use '//' to start from "
                         "project root".format(path))
    if build_module == '//':
        build_module = ''
    norm = normpath(join(build_module, path))
    if norm.startswith('..'):
        raise ValueError(
            "Invalid path `{}' - must remain inside project sandbox"
            .format(path))
    # normpath() yields '.' for the project root itself; map that to ''.
    # BUGFIX: the previous `norm.strip('.')` also stripped leading dots
    # from real names, mangling e.g. '.config' into 'config'.
    return '' if norm == '.' else norm
Return a normalized path for the path observed in build_module .
44,165
def acc_hash(filepath: str, hasher):
    """Feed the content of the file at *filepath* into *hasher* chunk-wise."""
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(_BUF_SIZE), b''):
            hasher.update(chunk)
Accumulate content of file at filepath in hasher .
44,166
def hash_file(filepath: str) -> str:
    """Return the hex MD5 digest of the content of the file at *filepath*."""
    hasher = hashlib.md5()
    acc_hash(filepath, hasher)
    return hasher.hexdigest()
Return the hexdigest MD5 hash of content of file at filepath .
44,167
def hash_tree(filepath: str) -> str:
    """Return the hex MD5 digest of the file or directory at *filepath*.

    For a directory, the hash covers relative paths (with '/' separators
    for cross-platform stability) and file contents, walked in sorted
    order.  Returns None when *filepath* does not exist.
    """
    if isfile(filepath):
        return hash_file(filepath)
    if not isdir(filepath):
        return None
    hasher = hashlib.md5()
    for root, dirs, files in walk(filepath):
        dirs.sort()  # make walk order deterministic
        for fname in sorted(files):
            fpath = join(root, fname)
            rel = relpath(fpath, filepath).replace('\\', '/')
            hasher.update(rel.encode('utf8'))
            acc_hash(fpath, hasher)
    return hasher.hexdigest()
Return the hexdigest MD5 hash of file or directory at filepath .
44,168
def add(self, artifact_type: ArtifactType, src_path: str,
        dst_path: str = None):
    """Register an artifact of *artifact_type*; dst defaults to src.

    Raises RuntimeError if *dst_path* is already mapped to a different
    source path.
    """
    if dst_path is None:
        dst_path = src_path
    existing = self._artifacts[artifact_type].setdefault(dst_path, src_path)
    if existing != src_path:
        raise RuntimeError(
            '{} artifact with dest path {} exists with different src '
            'path: {} != {}'.format(artifact_type, dst_path,
                                    src_path, existing))
Add an artifact of type artifact_type at src_path .
44,169
def extend(self, artifact_type: ArtifactType, src_paths: list):
    """Register every path in *src_paths* as an artifact of *artifact_type*."""
    for path in src_paths:
        self.add(artifact_type, path, path)
Add all src_paths as artifact of type artifact_type .
44,170
def link_types(self, base_dir: str, types: list, conf: Config) -> int:
    """Link all artifacts whose type is in *types* under *base_dir*.

    Returns the number of linked artifacts.
    """
    total = 0
    for artifact_type in types:
        mapping = self._artifacts.get(artifact_type)
        if mapping:
            dest = join(base_dir, self.type_to_dir[artifact_type])
            total += self._link(dest, mapping, conf)
    return total
Link all artifacts with types types under base_dir and return the number of linked artifacts .
44,171
def link_for_image(self, base_dir: str, conf: Config) -> int:
    """Link all artifacts needed in a Docker image under *base_dir*.

    Returns the number of linked artifacts.
    """
    image_types = [ArtifactType.app, ArtifactType.binary, ArtifactType.gen_py]
    return self.link_types(base_dir, image_types, conf)
Link all artifacts required for a Docker image under base_dir and return the number of linked artifacts .
44,172
def _link(self, base_dir: str, artifact_map: dict, conf: Config):
    """Link every dst->src entry of *artifact_map* under *base_dir*.

    Returns the number of artifacts linked.
    """
    count = 0
    for dst, src in artifact_map.items():
        link_node(join(conf.project_root, src),
                  join(conf.project_root, base_dir, dst))
        count += 1
    return count
Link all artifacts in artifact_map under base_dir and return the number of artifacts linked .
44,173
def get_readme():
    """Return the content of the project README.md (UTF-8)."""
    base_dir = path.abspath(path.dirname(__file__))
    readme_path = path.join(base_dir, 'README.md')
    with open(readme_path, encoding='utf-8') as readme_f:
        return readme_f.read()
Read and return the content of the project README file .
44,174
def args_to_props(target: Target, builder: Builder, args: list, kwargs: dict):
    """Convert build-file *args* / *kwargs* into ``target.props``.

    Emulates Python call semantics against the builder signature: raises
    TypeError on too many positionals, unknown or duplicate keywords,
    and missing required arguments; fills in signature defaults otherwise.
    """
    if len(args) > len(builder.sig):
        raise TypeError('{}() takes {}, but {} were given'.format(
            target.builder_name,
            format_num_positional_arguments(builder),
            len(args)))
    # Bind positional args in signature order.
    for arg_name, value in zip(builder.sig.keys(), args):
        target.props[arg_name] = value
    # Bind keyword args, rejecting unknown names and duplicate bindings.
    for arg_name, value in kwargs.items():
        if arg_name not in builder.sig:
            raise TypeError(
                "{}() got an unexpected keyword argument '{}'".format(
                    target.builder_name, arg_name))
        if arg_name in target.props:
            raise TypeError(
                "{}() got multiple values for argument '{}'".format(
                    target.builder_name, arg_name))
        target.props[arg_name] = value
    # Fill defaults; collect required args that were never bound.
    missing_args = []
    for arg_name, sig_spec in builder.sig.items():
        if arg_name not in target.props:
            if sig_spec.default == Empty:
                missing_args.append(arg_name)
            else:
                target.props[arg_name] = sig_spec.default
    if missing_args:
        raise TypeError(
            '{}() missing {} required positional argument{}: {}'.format(
                target.builder_name, len(missing_args),
                's' if len(missing_args) > 1 else '',
                ', '.join("'{}'".format(arg) for arg in missing_args)))
    logger.debug('Got props for target: {}', target)
Convert build file args and kwargs to target props .
44,175
def extractor(builder_name: str, builder: Builder, build_file_path: str,
              build_context) -> types.FunctionType:
    """Return a target-extraction function bound to one builder + build file.

    The returned closure is what build files call: it turns the call's
    args into a Target, runs registered builder hooks, and registers the
    target with *build_context*.
    """
    build_module = to_build_module(build_file_path, build_context.conf)

    def extract_target(*args, **kwargs):
        target = Target(builder_name=builder_name)
        args_to_props(target, builder, args, kwargs)
        # Pre-normalization name; appears unused here - TODO confirm.
        raw_name = target.props.name
        handle_typed_args(target, builder, build_module)
        logger.debug('Extracting target: {}', target)
        # Promote 'name' and 'deps' from props to first-class attributes.
        target.name = target.props.pop('name')
        target.deps = target.props.pop('deps', [])
        if target.deps:
            logger.debug('Got deps for target "{0.name}": {0.deps}', target)
        # Let plugin hooks mutate the target before registration.
        for hook_name, hook in Plugin.get_hooks_for_builder(builder_name):
            logger.debug('About to invoke hook {} on target {}',
                         hook_name, target)
            hook(build_context, target)
        build_context.register_target(target)
        logger.debug('Registered {}', target)

    return extract_target
Return a target extraction function for a specific builder and a specific build file .
44,176
def rdopkg_runner():
    """Return the default rdopkg ActionRunner with rdopkg actions loaded."""
    manager = ActionManager()
    manager.add_actions_modules(actions)
    manager.fill_aliases()
    return ActionRunner(action_manager=manager)
default rdopkg action runner including rdopkg action modules
44,177
def rdopkg(*cargs):
    """Entry point for the rdopkg CLI; returns shell.run's result."""
    runner = rdopkg_runner()
    return shell.run(runner, cargs=cargs, prog='rdopkg',
                     version=__version__)
rdopkg CLI interface
44,178
def getDynDnsClientForConfig(config, plugins=None):
    """Instantiate a working DynDnsClient from *config*.

    Returns None when the configured change detector is unknown.
    """
    initparams = {}
    if "interval" in config:
        initparams["detect_interval"] = config["interval"]
    if plugins is not None:
        initparams["plugins"] = plugins
    if "updater" in config:
        # Last configured updater wins.
        for updater_name, updater_options in config["updater"]:
            updater_cls = get_updater_class(updater_name)
            initparams["updater"] = updater_cls(**updater_options)
    if "detector" in config:
        detector_name, detector_opts = config["detector"][-1]
        try:
            detector_cls = get_detector_class(detector_name)
        except KeyError as exc:
            LOG.warning("Invalid change detector configuration: '%s'",
                        detector_name, exc_info=exc)
            return None
        initparams["detector"] = detector_cls(**detector_opts)
    return DynDnsClient(**initparams)
Instantiate and return a complete and working dyndns client .
44,179
def has_state_changed(self):
    """Check the offline detector and real DNS; return True on any change."""
    self.lastcheck = time.time()
    if self.detector.can_detect_offline():
        self.detector.detect()
    elif self.dns.detect() != self.detector.get_current_value():
        # Only run the (possibly expensive) detector when DNS disagrees.
        self.detector.detect()
    if self.detector.has_changed():
        LOG.debug("detector changed")
        return True
    if self.dns.has_changed():
        LOG.debug("dns changed")
        return True
    return False
Detect changes in offline detector and real DNS value .
44,180
def detect(self):
    """Query a web service for our public IP; record and return it.

    Returns None (after logging) when the check fails.
    """
    if self.opts_url and self.opts_parser:
        url, parser_name = self.opts_url, self.opts_parser
    else:
        url, parser_name = choice(self.urls)
    # Parser options are names resolved to module-level _parser_* functions.
    parser = globals().get("_parser_" + parser_name)
    theip = _get_ip_from_url(url, parser)
    if theip is None:
        LOG.info("Could not detect IP using webcheck! Offline?")
    self.set_current_value(theip)
    return theip
Try to contact a remote webservice and parse the returned output .
44,181
def add_plugin(self, plugin, call):
    """Register *plugin* if it implements the method named *call*."""
    meth = getattr(plugin, call, None)
    if meth is None:
        return
    self.plugins.append((plugin, meth))
Add plugin to list of plugins .
44,182
def listcall(self, *arg, **kw):
    """Call every plugin in order; return the first non-None result."""
    final_result = None
    for _plugin, meth in self.plugins:
        result = meth(*arg, **kw)
        if final_result is None and result is not None:
            final_result = result
    return final_result
Call each plugin sequentially .
44,183
def add_plugin(self, plugin):
    """Add *plugin*, replacing any existing plugin with the same name."""
    new_name = self.plugin_name(plugin)
    # In-place slice assignment preserves identity of the plugin list.
    self._plugins[:] = [p for p in self._plugins
                        if self.plugin_name(p) != new_name]
    self._plugins.append(plugin)
Add the given plugin .
44,184
def configure(self, args):
    """Enable and configure plugins according to parsed CLI *args*."""
    for plug in self._plugins:
        plug_name = self.plugin_name(plug)
        # Each plugin is toggled by a generated 'plugin_<name>' arg.
        plug.enabled = getattr(args, "plugin_%s" % plug_name, False)
        configure_hook = getattr(plug, "configure", None)
        if plug.enabled and callable(configure_hook):
            plug.configure(args)
    LOG.debug("Available plugins: %s", self._plugins)
    self.plugins = [plugin for plugin in self._plugins
                    if getattr(plugin, "enabled", False)]
    LOG.debug("Enabled plugins: %s", self.plugins)
Configure the set of plugins with the given args .
44,185
def options(self, parser, env):
    """Add a --with-<plugin> commandline option for each known plugin."""
    def describe(plug):
        import textwrap
        doc = plug.__class__.__doc__
        if doc:
            return textwrap.dedent(doc)
        return "(no help available)"
    for plug in self._plugins:
        name = self.plugin_name(plug)
        env_opt = (ENV_PREFIX + name.upper()).replace("-", "_")
        parser.add_argument(
            "--with-%s" % name,
            action="store_true",
            dest="plugin_%s" % name,
            default=env.get(env_opt),
            help="Enable plugin %s: %s [%s]" % (
                plug.__class__.__name__, describe(plug), env_opt))
Register commandline options with the given parser .
44,186
def load_plugins(self):
    """Register all builtin plugins, then delegate to the parent loader."""
    from dyndnsc.plugins.builtin import PLUGINS
    for plugin_cls in PLUGINS:
        self.add_plugin(plugin_cls())
    super(BuiltinPluginManager, self).load_plugins()
Load plugins from dyndnsc . plugins . builtin .
44,187
def keys(self, section=None):
    """Dict-like keys() over the configured (sub)section."""
    section = section or self.section
    config = self.config.get(section, {}) if section else self.config
    return config.keys()
Provide a dict-like keys() method.
44,188
def items(self, section=None):
    """Dict-like items() over the configured (sub)section."""
    section = section or self.section
    config = self.config.get(section, {}) if section else self.config
    return config.items()
Provide a dict-like items() method.
44,189
def values(self, section=None):
    """Dict-like values() over the configured (sub)section."""
    section = section or self.section
    config = self.config.get(section, {}) if section else self.config
    return config.values()
Provide a dict-like values() method.
44,190
def _get_filepath ( self , filename = None , config_dir = None ) : config_file = None config_dir_env_var = self . env_prefix + '_DIR' if not filename : filename = os . getenv ( self . env_prefix , default = self . default_file ) if os . path . dirname ( filename ) and os . path . exists ( filename ) : config_file = filename if not config_file : filename = os . path . basename ( filename ) if not config_dir : config_dir = os . getenv ( config_dir_env_var , default = '' ) for path in [ self . basepath , self . config_root ] : filepath = os . path . join ( path , config_dir , filename ) if os . path . exists ( filepath ) : config_file = filepath break return config_file
Get config file .
44,191
def resolve(hostname, family=AF_UNSPEC):
    """Resolve *hostname* via the OS; return a tuple of unique IP strings.

    *family* must be AF_UNSPEC, AF_INET or AF_INET6; anything else raises
    ValueError.  Returns an empty tuple when resolution fails.
    """
    af_ok = (AF_INET, AF_INET6)
    if family != AF_UNSPEC and family not in af_ok:
        raise ValueError("Invalid family '%s'" % family)
    try:
        addrinfo = socket.getaddrinfo(hostname, None, family)
    except socket.gaierror as exc:
        # "no data"/"no name" are expected misses; only log the unexpected.
        if exc.errno not in (socket.EAI_NODATA, socket.EAI_NONAME):
            LOG.debug("socket.getaddrinfo() raised an exception",
                      exc_info=exc)
        return ()
    if family == AF_UNSPEC:
        return tuple({item[4][0] for item in addrinfo if item[0] in af_ok})
    return tuple({item[4][0] for item in addrinfo})
Resolve hostname to one or more IP addresses through the operating system .
44,192
def detect(self):
    """Resolve our configured hostname; record and return the first IP.

    Records None when the hostname does not resolve.
    """
    ips = resolve(self.opts_hostname, self.opts_family)
    theip = next(iter(ips), None)
    self.set_current_value(theip)
    return theip
Resolve the hostname to an IP address through the operating system .
44,193
def list_presets(cfg, out=sys.stdout):
    """Write a human readable list of the presets in *cfg* to *out*."""
    for section in cfg.sections():
        if not section.startswith("preset:"):
            continue
        out.write(section.replace("preset:", "") + os.linesep)
        for key, value in cfg.items(section):
            out.write("\t%s = %s" % (key, value) + os.linesep)
Write a human readable list of available presets to out .
44,194
def create_argparser():
    """Build the CLI ArgumentParser; return (parser, defaults_dict)."""
    arg_defaults = {
        "daemon": False,
        "loop": False,
        "listpresets": False,
        "config": None,
        "debug": False,
        "sleeptime": 300,
        "version": False,
        "verbose_count": 0,
    }
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", dest="config",
                        help="config file",
                        default=arg_defaults["config"])
    parser.add_argument("--list-presets", dest="listpresets",
                        action="store_true",
                        help="list all available presets",
                        default=arg_defaults["listpresets"])
    parser.add_argument("-d", "--daemon", dest="daemon",
                        action="store_true",
                        help="go into daemon mode (implies --loop)",
                        default=arg_defaults["daemon"])
    parser.add_argument("--debug", dest="debug",
                        action="store_true",
                        help="increase logging level to DEBUG "
                             "(DEPRECATED, please use -vvv)",
                        default=arg_defaults["debug"])
    parser.add_argument("--loop", dest="loop",
                        action="store_true",
                        help="loop forever (default is to update once)",
                        default=arg_defaults["loop"])
    parser.add_argument("--sleeptime", dest="sleeptime",
                        help="how long to sleep between checks in seconds",
                        default=arg_defaults["sleeptime"])
    parser.add_argument("--version", dest="version",
                        action="store_true",
                        help="show version and exit",
                        default=arg_defaults["version"])
    parser.add_argument("-v", "--verbose", dest="verbose_count",
                        action="count",
                        default=arg_defaults["verbose_count"],
                        help="increases log verbosity for each occurrence")
    return parser, arg_defaults
Instantiate an argparse . ArgumentParser .
44,195
def run_forever(dyndnsclients):
    """Periodically check each client until interrupted; return 0."""
    while True:
        try:
            # Sleep first so a persistently failing check can't busy-loop.
            time.sleep(15)
            for client in dyndnsclients:
                client.check()
        except KeyboardInterrupt:
            break
        except Exception as exc:
            LOG.critical("An exception occurred in the dyndns loop",
                         exc_info=exc)
    return 0
Run an endless loop across the given dynamic DNS clients.
44,196
def main():
    """Run the dyndnsc CLI: parse args, build clients, update, maybe loop.

    Returns a process exit code (0 on success, 1 on client init failure).
    """
    plugins = DefaultPluginManager()
    plugins.load_plugins()
    parser, _ = create_argparser()
    # Let every updater/detector contribute its own CLI options.
    for kls in updater_classes():
        kls.register_arguments(parser)
    for kls in detector_classes():
        kls.register_arguments(parser)
    from os import environ
    plugins.options(parser, environ)
    args = parser.parse_args()
    if args.debug:
        # --debug is deprecated; map it to maximum verbosity.
        args.verbose_count = 5
    log_level = max(int(logging.WARNING / 10) - args.verbose_count, 0) * 10
    logging.basicConfig(level=log_level, format="%(levelname)s %(message)s")
    if args.version:
        from . import __version__
        print("dyndnsc %s" % __version__)
        return 0
    # Keep the 'requests' library from spamming the log at low levels.
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)
    logging.debug(parser)
    cfg = get_configuration(args.config)
    if args.listpresets:
        list_presets(cfg)
        return 0
    if args.config:
        collected_configs = collect_config(cfg)
    else:
        # No config file: derive a single client config from the CLI args.
        parsed_args = parse_cmdline_args(
            args, updater_classes().union(detector_classes()))
        logging.debug("parsed_args %r", parsed_args)
        collected_configs = {
            "cmdline": {"interval": int(args.sleeptime)}}
        collected_configs["cmdline"].update(parsed_args)
    plugins.configure(args)
    plugins.initialize()
    logging.debug("collected_configs: %r", collected_configs)
    dyndnsclients = []
    for thisconfig in collected_configs:
        logging.debug("Initializing client for '%s'", thisconfig)
        dyndnsclient = getDynDnsClientForConfig(
            collected_configs[thisconfig], plugins=plugins)
        if dyndnsclient is None:
            return 1
        # Bring DNS in sync once immediately, before any looping.
        dyndnsclient.sync()
        dyndnsclients.append(dyndnsclient)
    run_forever_callable = partial(run_forever, dyndnsclients)
    if args.daemon:
        import daemonocle
        daemon = daemonocle.Daemon(worker=run_forever_callable)
        daemon.do_action("start")
        args.loop = True
    if args.loop:
        run_forever_callable()
    return 0
Run the main CLI program .
44,197
def get_configuration(config_file=None):
    """Return a ConfigParser loaded with presets plus the user config.

    Raises ValueError when an explicitly given *config_file* is not a file;
    a missing default user config is silently skipped.
    """
    parser = configparser.ConfigParser()
    if config_file is None:
        config_file = os.path.join(os.getenv("HOME"), DEFAULT_USER_INI)
        if not os.path.isfile(config_file):
            config_file = None
    elif not os.path.isfile(config_file):
        raise ValueError("%s is not a file" % config_file)
    configs = [get_filename(PRESETS_INI)]
    if config_file:
        configs.append(config_file)
    LOG.debug("Attempting to read configuration from %r", configs)
    read_configs = parser.read(configs)
    LOG.debug("Successfully read configuration from %r", read_configs)
    LOG.debug("config file sections: %r", parser.sections())
    return parser
Return an initialized ConfigParser .
44,198
def collect_config(cfg):
    """Turn raw configparser client sections into per-client config dicts.

    'updater-*' / 'detector-*' keys become option dicts attached to the
    single configured updater/detector name; everything else passes
    through unchanged.
    """
    collected_configs = {}
    updater_key = "updater"
    detector_key = "detector"
    for client_name, client_cfg in _iraw_client_configs(cfg):
        updater_name = None
        updater_options = {}
        detector_name = None
        detector_options = {}
        client_config = {}
        for key in client_cfg:
            if key.startswith(detector_key + "-"):
                detector_options[
                    key.replace(detector_key + "-", "")] = client_cfg[key]
            elif key == updater_key:
                updater_name = client_cfg.get(key)
            elif key == detector_key:
                detector_name = client_cfg.get(key)
            elif key.startswith(updater_key + "-"):
                updater_options[
                    key.replace(updater_key + "-", "")] = client_cfg[key]
            else:
                client_config[key] = client_cfg[key]
        client_config[detector_key] = [(detector_name, detector_options)]
        client_config[updater_key] = [(updater_name, updater_options)]
        collected_configs[client_name] = client_config
    return collected_configs
Construct configuration dictionary from configparser .
44,199
def detect(self):
    """Run the configured shell command; record and return its output as IP."""
    if PY3:
        import subprocess
    else:
        # Python 2 fallback: 'commands' offers the same getoutput() API.
        import commands as subprocess
    try:
        theip = subprocess.getoutput(self.opts_command)
    except Exception:
        # Best effort: any failure means "no IP detected".
        theip = None
    self.set_current_value(theip)
    return theip
Detect and return the IP address .