idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
28,500 | def create_subcommand_synopsis ( self , parser ) : self . add_usage ( parser . usage , parser . _get_positional_actions ( ) , None , prefix = '' ) usage = self . _format_usage ( parser . usage , parser . _get_positional_actions ( ) , None , '' ) return self . _bold ( usage ) | show usage with description for commands |
28,501 | def get_worker_build_info ( workflow , platform ) : workspace = workflow . plugin_workspace [ OrchestrateBuildPlugin . key ] return workspace [ WORKSPACE_KEY_BUILD_INFO ] [ platform ] | Obtain worker build information for a given platform |
28,502 | def override_build_kwarg ( workflow , k , v , platform = None ) : key = OrchestrateBuildPlugin . key workspace = workflow . plugin_workspace . setdefault ( key , { } ) override_kwargs = workspace . setdefault ( WORKSPACE_KEY_OVERRIDE_KWARGS , { } ) override_kwargs . setdefault ( platform , { } ) override_kwargs [ platf... | Override a build - kwarg for all worker builds |
28,503 | def wait_for_any_cluster ( contexts ) : try : earliest_retry_at = min ( ctx . retry_at for ctx in contexts . values ( ) if not ctx . failed ) except ValueError : raise AllClustersFailedException ( "Could not find appropriate cluster for worker build." ) time_until_next = earliest_retry_at - dt . datetime . now ( ) time... | Wait until any of the clusters are out of retry - wait |
28,504 | def validate_arrangement_version ( self ) : arrangement_version = self . build_kwargs [ 'arrangement_version' ] if arrangement_version is None : return if arrangement_version <= 5 : self . log . warning ( "arrangement_version <= 5 is deprecated and will be removed" " in release 1.6.38" ) | Validate if the arrangement_version is supported |
28,505 | def get_clusters ( self , platform , retry_contexts , all_clusters ) : possible_cluster_info = { } candidates = set ( copy . copy ( all_clusters ) ) while candidates and not possible_cluster_info : wait_for_any_cluster ( retry_contexts ) for cluster in sorted ( candidates , key = attrgetter ( 'priority' ) ) : ctx = ret... | return clusters sorted by load . |
28,506 | def get_koji_upload_dir ( ) : dir_prefix = 'koji-upload' random_chars = '' . join ( [ random . choice ( ascii_letters ) for _ in range ( 8 ) ] ) unique_fragment = '%r.%s' % ( time . time ( ) , random_chars ) return os . path . join ( dir_prefix , unique_fragment ) | Create a path name for uploading files to |
28,507 | def select_and_start_cluster ( self , platform ) : clusters = self . reactor_config . get_enabled_clusters_for_platform ( platform ) if not clusters : raise UnknownPlatformException ( 'No clusters found for platform {}!' . format ( platform ) ) retry_contexts = { cluster . name : ClusterRetryContext ( self . max_cluste... | Choose a cluster and start a build on it |
28,508 | def set_build_image ( self ) : current_platform = platform . processor ( ) orchestrator_platform = current_platform or 'x86_64' current_buildimage = self . get_current_buildimage ( ) for plat , build_image in self . build_image_override . items ( ) : self . log . debug ( 'Overriding build image for %s platform to %s' ,... | Overrides build_image for worker to be same as in orchestrator build |
28,509 | def get_manifest ( self , session , repository , ref ) : self . log . debug ( "%s: Retrieving manifest for %s:%s" , session . registry , repository , ref ) headers = { 'Accept' : ', ' . join ( self . manifest_media_types ) } url = '/v2/{}/manifests/{}' . format ( repository , ref ) response = session . get ( url , head... | Downloads a manifest from a registry . ref can be a digest or a tag . |
28,510 | def link_manifest_references_into_repository ( self , session , manifest , media_type , source_repo , target_repo ) : if source_repo == target_repo : return parsed = json . loads ( manifest . decode ( 'utf-8' ) ) references = [ ] if media_type in ( MEDIA_TYPE_DOCKER_V2_SCHEMA2 , MEDIA_TYPE_OCI_V1 ) : references . appen... | Links all the blobs referenced by the manifest from source_repo into target_repo . |
28,511 | def store_manifest_in_repository ( self , session , manifest , media_type , source_repo , target_repo , digest = None , tag = None ) : if tag : self . log . debug ( "%s: Tagging manifest (or list) from %s as %s:%s" , session . registry , source_repo , target_repo , tag ) ref = tag elif digest : self . log . debug ( "%s... | Stores the manifest into target_repo possibly tagging it . This may involve copying referenced blobs from source_repo . |
28,512 | def build_list ( self , manifests ) : media_type = manifests [ 0 ] [ 'media_type' ] if ( not all ( m [ 'media_type' ] == media_type for m in manifests ) ) : raise PluginFailedException ( 'worker manifests have inconsistent types: {}' . format ( manifests ) ) if media_type == MEDIA_TYPE_DOCKER_V2_SCHEMA2 : list_type = M... | Builds a manifest list or OCI image out of the given manifests |
28,513 | def group_manifests_and_tag ( self , session , worker_digests ) : self . log . info ( "%s: Creating manifest list" , session . registry ) manifests = [ ] for platform , worker_image in worker_digests . items ( ) : repository = worker_image [ 'repository' ] digest = worker_image [ 'digest' ] media_type = get_manifest_me... | Creates a manifest list or OCI image index that groups the different manifests in worker_digests then tags the result with all the configured tags found in workflow . tag_conf . |
28,514 | def tag_manifest_into_registry ( self , session , worker_digest ) : self . log . info ( "%s: Tagging manifest" , session . registry ) digest = worker_digest [ 'digest' ] source_repo = worker_digest [ 'repository' ] image_manifest , _ , media_type , _ = self . get_manifest ( session , source_repo , digest ) if media_typ... | Tags the manifest identified by worker_digest into session . registry with all the configured tags found in workflow . tag_conf . |
28,515 | def detect_parent_image_nvr ( self , image_name , inspect_data = None ) : if inspect_data is None : inspect_data = self . workflow . builder . parent_image_inspect ( image_name ) labels = Labels ( inspect_data [ INSPECT_CONFIG ] . get ( 'Labels' , { } ) ) label_names = [ Labels . LABEL_TYPE_COMPONENT , Labels . LABEL_T... | Look for the NVR labels if any in the image . |
28,516 | def wait_for_parent_image_build ( self , nvr ) : self . log . info ( 'Waiting for Koji build for parent image %s' , nvr ) poll_start = time . time ( ) while time . time ( ) - poll_start < self . poll_timeout : build = self . koji_session . getBuild ( nvr ) if build : self . log . info ( 'Parent image Koji build found w... | Given image NVR wait for the build that produced it to show up in koji . If it doesn't within the timeout raise an error . |
28,517 | def make_result ( self ) : result = { } if self . _base_image_build : result [ BASE_IMAGE_KOJI_BUILD ] = self . _base_image_build if self . _parent_builds : result [ PARENT_IMAGES_KOJI_BUILDS ] = self . _parent_builds return result if result else None | Construct the result dict to be preserved in the build metadata . |
28,518 | def wait_for_command ( logs_generator ) : logger . info ( "wait_for_command" ) cr = CommandResult ( ) for item in logs_generator : cr . parse_item ( item ) logger . info ( "no more logs" ) return cr | Create a CommandResult from given iterator |
28,519 | def print_version_of_tools ( ) : logger . info ( "Using these tools:" ) for tool in get_version_of_tools ( ) : logger . info ( "%s-%s at %s" , tool [ "name" ] , tool [ "version" ] , tool [ "path" ] ) | print versions of used tools to logger |
28,520 | def guess_manifest_media_type ( content ) : encoding = guess_json_utf ( content ) try : manifest = json . loads ( content . decode ( encoding ) ) except ( ValueError , TypeError , UnicodeDecodeError ) : logger . exception ( "Unable to decode JSON" ) logger . debug ( "response content (%s): %r" , encoding , content ) re... | Guess the media type for the given manifest content |
28,521 | def manifest_is_media_type ( response , media_type ) : try : received_media_type = response . headers [ 'Content-Type' ] except KeyError : logger . debug ( "No Content-Type header; inspecting content" ) received_media_type = guess_manifest_media_type ( response . content ) logger . debug ( "guessed media type: %s" , re... | Attempt to confirm the returned manifest is of a given media type |
28,522 | def get_manifest_list ( image , registry , insecure = False , dockercfg_path = None ) : version = 'v2_list' registry_session = RegistrySession ( registry , insecure = insecure , dockercfg_path = dockercfg_path ) response , _ = get_manifest ( image , registry_session , version ) return response | Return manifest list for image . |
28,523 | def get_all_manifests ( image , registry , insecure = False , dockercfg_path = None , versions = ( 'v1' , 'v2' , 'v2_list' ) ) : digests = { } registry_session = RegistrySession ( registry , insecure = insecure , dockercfg_path = dockercfg_path ) for version in versions : response , _ = get_manifest ( image , registry_... | Return manifest digests for image . |
28,524 | def get_inspect_for_image ( image , registry , insecure = False , dockercfg_path = None ) : all_man_digests = get_all_manifests ( image , registry , insecure = insecure , dockercfg_path = dockercfg_path ) blob_config = None config_digest = None image_inspect = { } if 'v2_list' in all_man_digests : man_list_json = all_m... | Return inspect for image . |
28,525 | def df_parser ( df_path , workflow = None , cache_content = False , env_replace = True , parent_env = None ) : p_env = { } if parent_env : p_env = parent_env elif workflow : try : parent_config = workflow . builder . base_image_inspect [ INSPECT_CONFIG ] except ( AttributeError , TypeError , KeyError ) : logger . debug... | Wrapper for dockerfile_parse s DockerfileParser that takes into account parent_env inheritance . |
28,526 | def are_plugins_in_order ( plugins_conf , * plugins_names ) : all_plugins_names = [ plugin [ 'name' ] for plugin in plugins_conf or [ ] ] start_index = 0 for plugin_name in plugins_names : try : start_index = all_plugins_names . index ( plugin_name , start_index ) except ValueError : return False return True | Check if plugins are configured in given order . |
28,527 | def get_parent_image_koji_data ( workflow ) : koji_parent = workflow . prebuild_results . get ( PLUGIN_KOJI_PARENT_KEY ) or { } image_metadata = { } parents = { } for img , build in ( koji_parent . get ( PARENT_IMAGES_KOJI_BUILDS ) or { } ) . items ( ) : if not build : parents [ str ( img ) ] = None else : parents [ st... | Transform koji_parent plugin results into metadata dict . |
28,528 | def unpack_auth_b64 ( self , docker_registry ) : UnpackedAuth = namedtuple ( 'UnpackedAuth' , [ 'raw_str' , 'username' , 'password' ] ) credentials = self . get_credentials ( docker_registry ) auth_b64 = credentials . get ( 'auth' ) if auth_b64 : raw_str = b64decode ( auth_b64 ) . decode ( 'utf-8' ) unpacked_credential... | Decode and unpack base64 auth credentials from config file . |
28,529 | def default ( self ) : return self . v2_list or self . oci_index or self . oci or self . v2 or self . v1 | Return the default manifest schema version . |
28,530 | def get_log_files ( self , osbs , build_id ) : logs = None output = [ ] try : logs = osbs . get_orchestrator_build_logs ( build_id ) except OsbsException as ex : self . log . error ( "unable to get build logs: %r" , ex ) return output except TypeError : self . log . error ( "OSBS client does not support get_orchestrato... | Build list of log files |
28,531 | def update_image_digest ( self , image , platform , digest ) : image_name_tag = self . _key ( image ) image_name = image . to_str ( tag = False ) name_digest = '{}@{}' . format ( image_name , digest ) image_digests = self . _images_digests . setdefault ( image_name_tag , { } ) image_digests [ platform ] = name_digest | Update parent image digest for specific platform |
28,532 | def get_image_digests ( self , image ) : image_name_tag = self . _key ( image ) image_digests = self . _images_digests . get ( image_name_tag ) if image_digests is None : raise KeyError ( 'Image {} has no digest records' . format ( image_name_tag ) ) return image_digests | Get platform digests of specified image |
28,533 | def get_image_platform_digest ( self , image , platform ) : image_digests = self . get_image_digests ( image ) digest = image_digests . get ( platform ) if digest is None : raise KeyError ( 'Image {} has no digest record for platform {}' . format ( image , platform ) ) return digest | Get digest of specified image and platform |
28,534 | def rpm_qf_args ( tags = None , separator = ';' ) : if tags is None : tags = image_component_rpm_tags fmt = separator . join ( [ "%%{%s}" % tag for tag in tags ] ) return r"-qa --qf '{0}\n'" . format ( fmt ) | Return the arguments to pass to rpm to list RPMs in the format expected by parse_rpm_output . |
28,535 | def parse_rpm_output ( output , tags = None , separator = ';' ) : if tags is None : tags = image_component_rpm_tags def field ( tag ) : try : value = fields [ tags . index ( tag ) ] except ValueError : return None if value == '(none)' : return None return value components = [ ] sigmarker = 'Key ID ' for rpm in output :... | Parse output of the rpm query . |
28,536 | def koji_login ( session , proxyuser = None , ssl_certs_dir = None , krb_principal = None , krb_keytab = None ) : kwargs = { } if proxyuser : kwargs [ 'proxyuser' ] = proxyuser if ssl_certs_dir : logger . info ( "Using SSL certificates for Koji authentication" ) kwargs [ 'cert' ] = os . path . join ( ssl_certs_dir , 'c... | Choose the correct login method based on the available credentials and call that method on the provided session object . |
28,537 | def create_koji_session ( hub_url , auth_info = None ) : session = KojiSessionWrapper ( koji . ClientSession ( hub_url , opts = { 'krb_rdns' : False } ) ) if auth_info is not None : koji_login ( session , ** auth_info ) return session | Creates and returns a Koji session . If auth_info is provided the session will be authenticated . |
28,538 | def stream_task_output ( session , task_id , file_name , blocksize = DEFAULT_DOWNLOAD_BLOCK_SIZE ) : logger . debug ( 'Streaming {} from task {}' . format ( file_name , task_id ) ) offset = 0 contents = '[PLACEHOLDER]' while contents : contents = session . downloadTaskOutput ( task_id , file_name , offset , blocksize )... | Generator to download file from task without loading the whole file into memory . |
28,539 | def filename ( self ) : urlpath = unquote ( urlsplit ( self . repourl , allow_fragments = False ) . path ) basename = os . path . basename ( urlpath ) if not basename . endswith ( REPO_SUFFIX ) : basename += REPO_SUFFIX if self . add_hash : suffix = '-' + md5 ( self . repourl . encode ( 'utf-8' ) ) . hexdigest ( ) [ : ... | Returns the filename to be used for saving the repo file . |
28,540 | def filter_components_by_name ( name , components_list , type_ = T_RPM ) : for components in components_list : for component in components : if component [ 'type' ] == type_ and component [ 'name' ] == name : yield component | Generator filters components from components_list by name |
28,541 | def get_component_list_from_workers ( self , worker_metadatas ) : comp_list = [ ] for platform in sorted ( worker_metadatas . keys ( ) ) : for instance in worker_metadatas [ platform ] [ 'output' ] : if instance [ 'type' ] == 'docker-image' : if 'components' not in instance or not instance [ 'components' ] : self . log... | Find the component lists from each worker build . |
28,542 | def get_rpms ( self ) : tags = [ 'NAME' , 'VERSION' , 'RELEASE' , 'ARCH' , 'EPOCH' , 'SIGMD5' , 'SIGPGP:pgpsig' , 'SIGGPG:pgpsig' , ] cmd = "/bin/rpm " + rpm_qf_args ( tags ) try : ( status , output ) = subprocess . getstatusoutput ( cmd ) except AttributeError : with open ( '/dev/null' , 'r+' ) as devnull : p = subpro... | Build a list of installed RPMs in the format required for the metadata . |
28,543 | def get_output_metadata ( self , path , filename ) : checksums = get_checksums ( path , [ 'md5' ] ) metadata = { 'filename' : filename , 'filesize' : os . path . getsize ( path ) , 'checksum' : checksums [ 'md5sum' ] , 'checksum_type' : 'md5' } if self . metadata_only : metadata [ 'metadata_only' ] = True return metada... | Describe a file by its metadata . |
28,544 | def get_builder_image_id ( self ) : try : buildroot_tag = os . environ [ "OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE" ] except KeyError : return '' try : pod = self . osbs . get_pod_for_build ( self . build_id ) all_images = pod . get_container_image_ids ( ) except OsbsException as ex : self . log . error ( "unable to find imag... | Find out the docker ID of the buildroot image we are in . |
28,545 | def get_image_components ( self ) : output = self . workflow . image_components if output is None : self . log . error ( "%s plugin did not run!" , PostBuildRPMqaPlugin . key ) output = [ ] return output | Re - package the output of the rpmqa plugin into the format required for the metadata . |
28,546 | def get_digests ( self ) : try : pulp = get_manifests_in_pulp_repository ( self . workflow ) except KeyError : pulp = None digests = { } for registry in self . workflow . push_conf . docker_registries : for image in self . workflow . tag_conf . images : image_str = image . to_str ( ) if image_str in registry . digests ... | Returns a map of images to their digests |
28,547 | def get_repositories ( self , digests ) : if self . workflow . push_conf . pulp_registries : registries = self . workflow . push_conf . pulp_registries else : registries = self . workflow . push_conf . all_registries output_images = [ ] for registry in registries : image = self . pullspec_image . copy ( ) image . regis... | Build the repositories metadata |
28,548 | def upload_file ( self , session , output , serverdir ) : name = output . metadata [ 'filename' ] self . log . debug ( "uploading %r to %r as %r" , output . file . name , serverdir , name ) kwargs = { } if self . blocksize is not None : kwargs [ 'blocksize' ] = self . blocksize self . log . debug ( "using blocksize %d"... | Upload a file to koji |
28,549 | def get_manifest_list_only_expectation ( self ) : if not self . workflow . postbuild_results . get ( PLUGIN_GROUP_MANIFESTS_KEY ) : self . log . debug ( 'Cannot check if only manifest list digest should be returned ' 'because group manifests plugin did not run' ) return False platforms = get_platforms ( self . workflow... | Get expectation for manifest list only |
28,550 | def run ( self ) : build_json = get_build_json ( ) current_platform = platform . processor ( ) or 'x86_64' self . manifest_list_cache = { } organization = get_registries_organization ( self . workflow ) for nonce , parent in enumerate ( sorted ( self . workflow . builder . parent_images . keys ( ) , key = str ) ) : if ... | Pull parent images and retag them uniquely for this build . |
28,551 | def _get_image_for_different_arch ( self , image , platform ) : parents_digests = self . workflow . builder . parent_images_digests try : digests = parents_digests . get_image_digests ( image ) except KeyError : return None if not digests : return None platform_digest = digests . get ( platform ) if platform_digest is ... | Get image from random arch |
28,552 | def _resolve_base_image ( self , build_json ) : spec = build_json . get ( "spec" ) try : image_id = spec [ 'triggeredBy' ] [ 0 ] [ 'imageChangeBuild' ] [ 'imageID' ] except ( TypeError , KeyError , IndexError ) : base_image = self . workflow . builder . base_image self . log . info ( "using %s as base image." , base_im... | If this is an auto - rebuild adjust the base image to use the triggering build |
28,553 | def _ensure_image_registry ( self , image ) : image_with_registry = image . copy ( ) if self . parent_registry : if image . registry and image . registry != self . parent_registry : error = ( "Registry specified in dockerfile image doesn't match configured one. " "Dockerfile: '%s'; expected registry: '%s'" % ( image , ... | If plugin configured with a parent registry ensure the image uses it |
28,554 | def _pull_and_tag_image ( self , image , build_json , nonce ) : image = image . copy ( ) first_library_exc = None for _ in range ( 20 ) : try : self . tasker . pull_image ( image , insecure = self . parent_registry_insecure , dockercfg_path = self . parent_registry_dockercfg_path ) self . workflow . pulled_base_images ... | Docker pull the image and tag it uniquely for use by this build |
28,555 | def _get_manifest_list ( self , image ) : if image in self . manifest_list_cache : return self . manifest_list_cache [ image ] manifest_list = get_manifest_list ( image , image . registry , insecure = self . parent_registry_insecure , dockercfg_path = self . parent_registry_dockercfg_path ) if '@sha256:' in str ( image... | try to figure out manifest list |
28,556 | def _validate_platforms_in_image ( self , image ) : expected_platforms = get_platforms ( self . workflow ) if not expected_platforms : self . log . info ( 'Skipping validation of available platforms ' 'because expected platforms are unknown' ) return if len ( expected_platforms ) == 1 : self . log . info ( 'Skipping va... | Ensure that the image provides all platforms expected for the build . |
28,557 | def get_dockercfg_credentials ( self , docker_registry ) : if not self . registry_secret_path : return { } dockercfg = Dockercfg ( self . registry_secret_path ) registry_creds = dockercfg . get_credentials ( docker_registry ) if 'username' not in registry_creds : return { } return { 'basic_auth_username' : registry_cre... | Read the . dockercfg file and return an empty dict or else a dict with keys basic_auth_username and basic_auth_password . |
28,558 | def start_compose ( self , source_type , source , packages = None , sigkeys = None , arches = None , flags = None , multilib_arches = None , multilib_method = None , modular_koji_tags = None ) : body = { 'source' : { 'type' : source_type , 'source' : source } } if source_type == "tag" : body [ 'source' ] [ 'packages' ]... | Start a new ODCS compose |
28,559 | def renew_compose ( self , compose_id ) : logger . info ( "Renewing compose %d" , compose_id ) response = self . session . patch ( '{}composes/{}' . format ( self . url , compose_id ) ) response . raise_for_status ( ) response_json = response . json ( ) compose_id = response_json [ 'id' ] logger . info ( "Renewed compo... | Renew or extend existing compose |
28,560 | def wait_for_compose ( self , compose_id , burst_retry = 1 , burst_length = 30 , slow_retry = 10 , timeout = 1800 ) : logger . debug ( "Getting compose information for information for compose_id={}" . format ( compose_id ) ) url = '{}composes/{}' . format ( self . url , compose_id ) start_time = time . time ( ) while T... | Wait for compose request to finalize |
28,561 | def _find_stages ( self ) : stages = [ ] end = last_user_found = None for part in reversed ( self . dfp . structure ) : if end is None : end = part if part [ 'instruction' ] == 'USER' and not last_user_found : last_user_found = part [ 'content' ] if part [ 'instruction' ] == 'FROM' : stages . insert ( 0 , { 'from_struc... | Find limits of each Dockerfile stage |
28,562 | def get_digests ( self ) : digests = { } for registry in self . workflow . push_conf . docker_registries : for image in self . workflow . tag_conf . images : image_str = image . to_str ( ) if image_str in registry . digests : digest = registry . digests [ image_str ] digests [ image . to_str ( registry = False ) ] = di... | Returns a map of repositories to digests |
28,563 | def _get_registries ( self ) : if self . workflow . buildstep_result . get ( PLUGIN_BUILD_ORCHESTRATE_KEY ) : registries = self . workflow . push_conf . pulp_registries if not registries : registries = self . workflow . push_conf . all_registries return registries else : return self . workflow . push_conf . all_registr... | Return a list of registries that this build updated |
28,564 | def get_repositories_and_digests ( self ) : digests = { } typed_digests = { } for registry in self . workflow . push_conf . docker_registries : for image in self . workflow . tag_conf . images : image_str = image . to_str ( ) if image_str in registry . digests : image_digests = registry . digests [ image_str ] if self ... | Returns a map of images to their repositories and a map of media types to each digest |
28,565 | def update_buildroot_koji ( self , buildroot , output ) : docker = output [ 1 ] [ 'extra' ] [ 'docker' ] name = '' for tag in docker [ 'tags' ] : for repo in docker [ 'repositories' ] : if tag in repo : iname = ImageName . parse ( repo ) name = iname . to_str ( registry = False ) break buildroot [ 'extra' ] [ 'osbs' ] ... | put the final koji information in the buildroot under extra . osbs |
28,566 | def get_metadata ( self ) : try : metadata = get_build_json ( ) [ "metadata" ] self . build_id = metadata [ "name" ] except KeyError : self . log . error ( "No build metadata" ) raise for image in self . workflow . tag_conf . unique_images : self . pullspec_image = image break for image in self . workflow . tag_conf . ... | Build the metadata needed for importing the build |
28,567 | def get_worker_digests ( self ) : try : builds = self . workflow . build_result . annotations [ 'worker-builds' ] except ( TypeError , KeyError ) : return { } worker_digests = { } for plat , annotation in builds . items ( ) : digests = annotation [ 'digests' ] self . log . debug ( "build %s has digests: %s" , plat , di... | If we are being called from an orchestrator build collect the worker node data and recreate the data locally . |
28,568 | def make_remote_image_result ( annotations = None , labels = None ) : return BuildResult ( image_id = BuildResult . REMOTE_IMAGE , annotations = annotations , labels = labels ) | Instantiate BuildResult for image not built locally . |
28,569 | def base_image_inspect ( self ) : if self . _base_image_inspect is None : if self . base_from_scratch : self . _base_image_inspect = { } elif self . parents_pulled or self . custom_base_image : try : self . _base_image_inspect = self . tasker . inspect_image ( self . base_image ) except docker . errors . NotFound : rai... | inspect base image |
28,570 | def parent_image_inspect ( self , image ) : image_name = ImageName . parse ( image ) if image_name not in self . _parent_images_inspect : if self . parents_pulled : self . _parent_images_inspect [ image_name ] = self . tasker . inspect_image ( image ) else : self . _parent_images_inspect [ image_name ] = atomic_reactor... | inspect parent image |
28,571 | def inspect_built_image ( self ) : logger . info ( "inspecting built image '%s'" , self . image_id ) self . ensure_is_built ( ) inspect_data = self . tasker . inspect_image ( self . image_id ) return inspect_data | inspect built image |
28,572 | def get_base_image_info ( self ) : if self . base_from_scratch : return logger . info ( "getting information about base image '%s'" , self . base_image ) image_info = self . tasker . get_image_info_by_image_name ( self . base_image ) items_count = len ( image_info ) if items_count == 1 : return image_info [ 0 ] elif it... | query docker about base image |
28,573 | def get_built_image_info ( self ) : logger . info ( "getting information about built image '%s'" , self . image ) image_info = self . tasker . get_image_info_by_image_name ( self . image ) items_count = len ( image_info ) if items_count == 1 : return image_info [ 0 ] elif items_count <= 0 : logger . error ( "image '%s'... | query docker about built image |
28,574 | def build_inside ( input_method , input_args = None , substitutions = None ) : def process_keyvals ( keyvals ) : keyvals = keyvals or [ ] processed_keyvals = { } for arg in keyvals : key , value = arg . split ( "=" , 1 ) processed_keyvals [ key ] = value return processed_keyvals main = __name__ . split ( '.' , 1 ) [ 0 ... | use requested input plugin to load configuration and then initiate build |
28,575 | def run ( self ) : while True : with self . _lock : self . _update ( self . _data ) if self . _done : break time . sleep ( 1 ) | Overrides parent method to implement thread s functionality . |
28,576 | def get_config ( workflow ) : try : workspace = workflow . plugin_workspace [ ReactorConfigPlugin . key ] return workspace [ WORKSPACE_CONF_KEY ] except KeyError : conf = ReactorConfig ( ) workspace = workflow . plugin_workspace . get ( ReactorConfigPlugin . key , { } ) workspace [ WORKSPACE_CONF_KEY ] = conf workflow ... | Obtain configuration object Does not fail |
28,577 | def run ( self ) : if self . reactor_config_map : self . log . info ( "reading config from REACTOR_CONFIG env variable" ) conf = read_yaml ( self . reactor_config_map , 'schemas/config.json' ) else : config_filename = os . path . join ( self . config_path , self . basename ) self . log . info ( "reading config from %s"... | Run the plugin |
28,578 | def _check_build_input ( self , image , args_path ) : try : with open ( os . path . join ( args_path , BUILD_JSON ) ) as json_args : logger . debug ( "build input: image = '%s', args = '%s'" , image , json_args . read ( ) ) except ( IOError , OSError ) as ex : logger . error ( "unable to open json arguments: %r" , ex )... | Internal method validate provided args . |
28,579 | def build_image_from_path ( self , path , image , use_cache = False , remove_im = True ) : logger . info ( "building image '%s' from path '%s'" , image , path ) response = self . d . build ( path = path , tag = image . to_str ( ) , nocache = not use_cache , decode = True , rm = remove_im , forcerm = True , pull = False... | build image from provided path and tag it |
28,580 | def build_image_from_git ( self , url , image , git_path = None , git_commit = None , copy_dockerfile_to = None , stream = False , use_cache = False ) : logger . info ( "building image '%s' from git repo '%s' specified as URL '%s'" , image , git_path , url ) logger . info ( "will copy Dockerfile to '%s'" , copy_dockerf... | build image from provided url and tag it |
28,581 | def run ( self , image , command = None , create_kwargs = None , start_kwargs = None , volume_bindings = None , privileged = None ) : logger . info ( "creating container from image '%s' and running it" , image ) create_kwargs = create_kwargs or { } if 'host_config' not in create_kwargs : conf = { } if volume_bindings i... | create container from provided image and start it |
28,582 | def commit_container ( self , container_id , image = None , message = None ) : logger . info ( "committing container '%s'" , container_id ) logger . debug ( "container_id = '%s', image = '%s', message = '%s'" , container_id , image , message ) tag = None if image : tag = image . tag image = image . to_str ( tag = False... | create image from provided container |
28,583 | def pull_image ( self , image , insecure = False , dockercfg_path = None ) : logger . info ( "pulling image '%s' from registry" , image ) logger . debug ( "image = '%s', insecure = '%s'" , image , insecure ) tag = image . tag if dockercfg_path : self . login ( registry = image . registry , docker_secret_path = dockercf... | pull provided image from registry |
28,584 | def tag_image ( self , image , target_image , force = False ) : logger . info ( "tagging image '%s' as '%s'" , image , target_image ) logger . debug ( "image = '%s', target_image_name = '%s'" , image , target_image ) if not isinstance ( image , ImageName ) : image = ImageName . parse ( image ) if image != target_image ... | tag provided image with specified image_name registry and tag |
28,585 | def login ( self , registry , docker_secret_path ) : logger . info ( "logging in: registry '%s', secret path '%s'" , registry , docker_secret_path ) dockercfg = Dockercfg ( docker_secret_path ) credentials = dockercfg . get_credentials ( registry ) unpacked_auth = dockercfg . unpack_auth_b64 ( registry ) username = cre... | login to docker registry |
28,586 | def push_image ( self , image , insecure = False ) : logger . info ( "pushing image '%s'" , image ) logger . debug ( "image: '%s', insecure: '%s'" , image , insecure ) try : command_result = self . retry_generator ( self . d . push , image . to_str ( tag = False ) , tag = image . tag , insecure_registry = insecure , de... | push provided image to registry |
28,587 | def tag_and_push_image ( self , image , target_image , insecure = False , force = False , dockercfg = None ) : logger . info ( "tagging and pushing image '%s' as '%s'" , image , target_image ) logger . debug ( "image = '%s', target_image = '%s'" , image , target_image ) self . tag_image ( image , target_image , force =... | tag provided image and push it to registry |
28,588 | def remove_image ( self , image_id , force = False , noprune = False ) : logger . info ( "removing image '%s' from filesystem" , image_id ) logger . debug ( "image_id = '%s'" , image_id ) if isinstance ( image_id , ImageName ) : image_id = image_id . to_str ( ) self . d . remove_image ( image_id , force = force , nopru... | remove provided image from filesystem |
28,589 | def remove_container ( self , container_id , force = False ) : logger . info ( "removing container '%s' from filesystem" , container_id ) logger . debug ( "container_id = '%s'" , container_id ) self . d . remove_container ( container_id , force = force ) | remove provided container from filesystem |
28,590 | def image_exists ( self , image_id ) : logger . info ( "checking whether image '%s' exists" , image_id ) logger . debug ( "image_id = '%s'" , image_id ) try : response = self . d . inspect_image ( image_id ) except APIError as ex : logger . warning ( repr ( ex ) ) response = False else : response = response is not None... | does provided image exists? |
28,591 | def get_volumes_for_container ( self , container_id , skip_empty_source = True ) : logger . info ( "listing volumes for container '%s'" , container_id ) inspect_output = self . d . inspect_container ( container_id ) volumes = inspect_output [ 'Mounts' ] or [ ] volume_names = [ x [ 'Name' ] for x in volumes ] if skip_em... | get a list of volumes mounter in a container |
28,592 | def remove_volume ( self , volume_name ) : logger . info ( "removing volume '%s'" , volume_name ) try : self . d . remove_volume ( volume_name ) except APIError as ex : if ex . response . status_code == requests . codes . CONFLICT : logger . debug ( "ignoring a conflict when removing volume %s" , volume_name ) else : r... | remove a volume by its name |
28,593 | def run ( self ) : env_name = self . env_name or BUILD_JSON_ENV try : build_cfg_json = os . environ [ env_name ] except KeyError : self . log . error ( "build config not found in env variable '%s'" , env_name ) return None else : try : return self . substitute_configuration ( json . loads ( build_cfg_json ) ) except Va... | get json with build config from environment variable |
28,594 | def get_default_image_build_conf ( self ) : target = self . koji_target vcs_info = self . workflow . source . get_vcs_info ( ) ksurl = '{}#{}' . format ( vcs_info . vcs_url , vcs_info . vcs_ref ) base_urls = [ ] for repo in self . repos : for url in self . extract_base_url ( repo ) : url = url . replace ( '$basearch' ,... | Create a default image build config |
28,595 | def update_config_from_dockerfile ( self , config ) : labels = Labels ( df_parser ( self . workflow . builder . df_path ) . labels ) for config_key , label in ( ( 'name' , Labels . LABEL_TYPE_COMPONENT ) , ( 'version' , Labels . LABEL_TYPE_VERSION ) , ) : try : _ , value = labels . get_name_and_value ( label ) except K... | Updates build config with values from the Dockerfile |
28,596 | def load_plugins ( self , plugin_class_name ) : plugins_dir = os . path . join ( os . path . dirname ( __file__ ) , 'plugins' ) logger . debug ( "loading plugins from dir '%s'" , plugins_dir ) files = [ os . path . join ( plugins_dir , f ) for f in os . listdir ( plugins_dir ) if f . endswith ( ".py" ) ] if self . plug... | load all available plugins |
28,597 | def get_available_plugins ( self ) : available_plugins = [ ] PluginData = namedtuple ( 'PluginData' , 'name, plugin_class, conf, is_allowed_to_fail' ) for plugin_request in self . plugins_conf : plugin_name = plugin_request [ 'name' ] try : plugin_class = self . plugin_classes [ plugin_name ] except KeyError : if plugi... | check requested plugins availability and handle missing plugins |
28,598 | def run ( self , keep_going = False , buildstep_phase = False ) : failed_msgs = [ ] plugin_successful = False plugin_response = None available_plugins = self . available_plugins for plugin in available_plugins : plugin_successful = False logger . debug ( "running plugin '%s'" , plugin . name ) start_time = datetime . d... | run all requested plugins |
28,599 | def _should_send ( self , rebuild , success , auto_canceled , manual_canceled ) : should_send = False should_send_mapping = { self . MANUAL_SUCCESS : not rebuild and success , self . MANUAL_FAIL : not rebuild and not success , self . MANUAL_CANCELED : not rebuild and manual_canceled , self . AUTO_SUCCESS : rebuild and ... | Return True if any state in self . send_on meets given conditions thus meaning that a notification mail should be sent . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.