idx int64 0 24.9k | question stringlengths 68 4.14k | target stringlengths 9 749 |
|---|---|---|
300 | def create_profile! cert = certificate_to_use bundle_id = Sigh . config [ :app_identifier ] name = Sigh . config [ :provisioning_name ] || [ bundle_id , profile_type . pretty_type ] . join ( ' ' ) unless Sigh . config [ :skip_fetch_profiles ] if Spaceship . provisioning_profile . all . find { | p | p . name == name } UI . error ( "The name '#{name}' is already taken, using another one." ) name += " #{Time.now.to_i}" end end UI . important ( "Creating new provisioning profile for '#{Sigh.config[:app_identifier]}' with name '#{name}' for '#{Sigh.config[:platform]}' platform" ) profile = profile_type . create! ( name : name , bundle_id : bundle_id , certificate : cert , mac : Sigh . config [ :platform ] . to_s == 'macos' , sub_platform : Sigh . config [ :platform ] . to_s == 'tvos' ? 'tvOS' : nil , template_name : Sigh . config [ :template_name ] ) profile end | Create a new profile and return it |
301 | def certificate_to_use certificates = certificates_for_profile_and_platform certificates = certificates . find_all do | c | if Sigh . config [ :cert_id ] next unless c . id == Sigh . config [ :cert_id ] . strip end if Sigh . config [ :cert_owner_name ] next unless c . owner_name . strip == Sigh . config [ :cert_owner_name ] . strip end true end if Helper . mac? unless Sigh . config [ :skip_certificate_verification ] certificates = certificates . find_all do | c | file = Tempfile . new ( 'cert' ) file . write ( c . download_raw ) file . close FastlaneCore :: CertChecker . installed? ( file . path ) end end end if certificates . count > 1 && ! Sigh . config [ :development ] UI . important ( "Found more than one code signing identity. Choosing the first one. Check out `fastlane sigh --help` to see all available options." ) UI . important ( "Available Code Signing Identities for current filters:" ) certificates . each do | c | str = [ "\t- Name:" , c . owner_name , "- ID:" , c . id + " - Expires" , c . expires . strftime ( "%d/%m/%Y" ) ] . join ( " " ) UI . message ( str . green ) end end if certificates . count == 0 filters = "" filters << "Owner Name: '#{Sigh.config[:cert_owner_name]}' " if Sigh . config [ :cert_owner_name ] filters << "Certificate ID: '#{Sigh.config[:cert_id]}' " if Sigh . config [ :cert_id ] UI . important ( "No certificates for filter: #{filters}" ) if filters . length > 0 message = "Could not find a matching code signing identity for type '#{profile_type.to_s.split(':').last}'. " message += "It is recommended to use match to manage code signing for you, more information on https://codesigning.guide. " message += "If you don't want to do so, you can also use cert to generate a new one: https://fastlane.tools/cert" UI . user_error! ( message ) end return certificates if Sigh . config [ :development ] return certificates . first end | Certificate to use based on the current distribution mode |
302 | def download_profile ( profile ) UI . important ( "Downloading provisioning profile..." ) profile_name ||= "#{profile_type.pretty_type}_#{Sigh.config[:app_identifier]}" if Sigh . config [ :platform ] . to_s == 'tvos' profile_name += "_tvos" end if Sigh . config [ :platform ] . to_s == 'macos' profile_name += '.provisionprofile' else profile_name += '.mobileprovision' end tmp_path = Dir . mktmpdir ( "profile_download" ) output_path = File . join ( tmp_path , profile_name ) File . open ( output_path , "wb" ) do | f | f . write ( profile . download ) end UI . success ( "Successfully downloaded provisioning profile..." ) return output_path end | Downloads and stores the provisioning profile |
303 | def ensure_app_exists! return if Spaceship :: App . find ( Sigh . config [ :app_identifier ] , mac : Sigh . config [ :platform ] . to_s == 'macos' ) print_produce_command ( Sigh . config ) UI . user_error! ( "Could not find App with App Identifier '#{Sigh.config[:app_identifier]}'" ) end | Makes sure the current App ID exists . If not it will show an appropriate error message |
304 | def download_all ( download_xcode_profiles : false ) UI . message ( "Starting login with user '#{Sigh.config[:username]}'" ) Spaceship . login ( Sigh . config [ :username ] , nil ) Spaceship . select_team UI . message ( "Successfully logged in" ) Spaceship . provisioning_profile . all ( xcode : download_xcode_profiles ) . each do | profile | if profile . valid? UI . message ( "Downloading profile '#{profile.name}'..." ) download_profile ( profile ) else UI . important ( "Skipping invalid/expired profile '#{profile.name}'" ) end end if download_xcode_profiles UI . message ( "This run also included all Xcode managed provisioning profiles, as you used the `--download_xcode_profiles` flag" ) else UI . message ( "All Xcode managed provisioning profiles were ignored on this, to include them use the `--download_xcode_profiles` flag" ) end end | Download all valid provisioning profiles |
305 | def handle_control_command ( command ) exit_reason = nil if command . cancel_signal? UI . verbose ( "received cancel signal shutting down, reason: #{command.reason}" ) cancel_response = '{"payload":{"status":"cancelled"}}' send_response ( cancel_response ) exit_reason = :cancelled elsif command . done_signal? UI . verbose ( "received done signal shutting down" ) exit_reason = :done end if command . user_message UI . important ( command . user_message ) end handle_disconnect ( error : false , exit_reason : exit_reason ) return COMMAND_EXECUTION_STATE [ :already_shutdown ] end | we got a server control command from the client to do something like shutdown |
306 | def send_response ( json ) UI . verbose ( "sending #{json}" ) begin @client . puts ( json ) rescue Errno :: EPIPE => e UI . verbose ( e ) return COMMAND_EXECUTION_STATE [ :error ] end return COMMAND_EXECUTION_STATE [ :ready ] end | send json back to client |
307 | def execute_action_command ( command : nil ) command_return = @command_executor . execute ( command : command , target_object : nil ) return_object = command_return . return_value return_value_type = command_return . return_value_type closure_arg = command_return . closure_argument_value return_object = return_value_processor . prepare_object ( return_value : return_object , return_value_type : return_value_type ) if closure_arg . nil? closure_arg = closure_arg . to_s else closure_arg = return_value_processor . prepare_object ( return_value : closure_arg , return_value_type : :string ) end Thread . current [ :exception ] = nil payload = { payload : { status : "ready_for_next" , return_object : return_object , closure_argument_value : closure_arg } } return JSON . generate ( payload ) rescue StandardError => e Thread . current [ :exception ] = e exception_array = [ ] exception_array << "#{e.class}:" exception_array << e . backtrace while e . respond_to? ( "cause" ) && ( e = e . cause ) exception_array << "cause: #{e.class}" exception_array << e . backtrace end payload = { payload : { status : "failure" , failure_information : exception_array . flatten } } return JSON . generate ( payload ) end | execute fastlane action command |
308 | def schemes @schemes ||= if workspace? workspace . schemes . reject do | k , v | v . include? ( "Pods/Pods.xcodeproj" ) end . keys else Xcodeproj :: Project . schemes ( path ) end end | Get all available schemes in an array |
309 | def select_scheme ( preferred_to_include : nil ) if options [ :scheme ] . to_s . length > 0 unless schemes . include? ( options [ :scheme ] . to_s ) UI . error ( "Couldn't find specified scheme '#{options[:scheme]}'. Please make sure that the scheme is shared, see https://developer.apple.com/library/content/documentation/IDEs/Conceptual/xcode_guide-continuous_integration/ConfigureBots.html#//apple_ref/doc/uid/TP40013292-CH9-SW3" ) options [ :scheme ] = nil end end return if options [ :scheme ] . to_s . length > 0 if schemes . count == 1 options [ :scheme ] = schemes . last elsif schemes . count > 1 preferred = nil if preferred_to_include preferred = schemes . find_all { | a | a . downcase . include? ( preferred_to_include . downcase ) } end if preferred_to_include && preferred . count == 1 options [ :scheme ] = preferred . last elsif automated_scheme_selection? && schemes . include? ( project_name ) UI . important ( "Using scheme matching project name (#{project_name})." ) options [ :scheme ] = project_name elsif Helper . ci? UI . error ( "Multiple schemes found but you haven't specified one." ) UI . error ( "Since this is a CI, please pass one using the `scheme` option" ) show_scheme_shared_information UI . user_error! ( "Multiple schemes found" ) else puts ( "Select Scheme: " ) options [ :scheme ] = choose ( * schemes ) end else show_scheme_shared_information UI . user_error! ( "No Schemes found" ) end end | Let the user select a scheme Use a scheme containing the preferred_to_include string when multiple schemes were found |
310 | def configurations @configurations ||= if workspace? workspace . file_references . map ( & :path ) . reject { | p | p . include? ( "Pods/Pods.xcodeproj" ) } . map do | p | begin Xcodeproj :: Project . open ( p ) . build_configurations rescue [ ] end end . flatten . compact . map ( & :name ) else project . build_configurations . map ( & :name ) end end | Get all available configurations in an array |
311 | def default_build_settings ( key : nil , optional : true ) options [ :scheme ] ||= schemes . first if is_workspace build_settings ( key : key , optional : optional ) end | Returns the build settings and sets the default scheme to the options hash |
312 | def print_tables puts ( "" ) if self . certs . count > 0 rows = self . certs . collect do | cert | cert_expiration = cert . expires . nil? ? "Unknown" : cert . expires . strftime ( "%Y-%m-%d" ) [ cert . name , cert . id , cert . class . to_s . split ( "::" ) . last , cert_expiration ] end puts ( Terminal :: Table . new ( { title : "Certificates that are going to be revoked" . green , headings : [ "Name" , "ID" , "Type" , "Expires" ] , rows : FastlaneCore :: PrintTable . transform_output ( rows ) } ) ) puts ( "" ) end if self . profiles . count > 0 rows = self . profiles . collect do | p | status = p . status == 'Active' ? p . status . green : p . status . red expires = p . expires ? p . expires . strftime ( "%Y-%m-%d" ) : nil [ p . name , p . id , status , p . type , expires ] end puts ( Terminal :: Table . new ( { title : "Provisioning Profiles that are going to be revoked" . green , headings : [ "Name" , "ID" , "Status" , "Type" , "Expires" ] , rows : FastlaneCore :: PrintTable . transform_output ( rows ) } ) ) puts ( "" ) end if self . files . count > 0 rows = self . files . collect do | f | components = f . split ( File :: SEPARATOR ) [ - 3 .. - 1 ] file_type = components [ 0 .. 1 ] . reverse . join ( " " ) [ 0 .. - 2 ] [ file_type , components [ 2 ] ] end puts ( Terminal :: Table . new ( { title : "Files that are going to be deleted" . green + "\n" + self . storage . human_readable_description , headings : [ "Type" , "File Name" ] , rows : rows } ) ) puts ( "" ) end end | Print tables to ask the user |
313 | def upload ( options ) return if options [ :skip_metadata ] verify_available_languages! ( options ) unless options [ :edit_live ] app = options [ :app ] details = app . details if options [ :edit_live ] v = app . live_version ( platform : options [ :platform ] ) localised_options = LOCALISED_LIVE_VALUES non_localised_options = NON_LOCALISED_LIVE_VALUES if v . nil? UI . message ( "Couldn't find live version, editing the current version on App Store Connect instead" ) v = app . edit_version ( platform : options [ :platform ] ) end else v = app . edit_version ( platform : options [ :platform ] ) localised_options = ( LOCALISED_VERSION_VALUES + LOCALISED_APP_VALUES ) non_localised_options = ( NON_LOCALISED_VERSION_VALUES + NON_LOCALISED_APP_VALUES ) end individual = options [ :individual_metadata_items ] || [ ] localised_options . each do | key | current = options [ key ] next unless current unless current . kind_of? ( Hash ) UI . error ( "Error with provided '#{key}'. Must be a hash, the key being the language." ) next end current . each do | language , value | next unless value . to_s . length > 0 strip_value = value . to_s . strip if individual . include? ( key . to_s ) upload_individual_item ( app , v , language , key , strip_value ) else v . send ( key ) [ language ] = strip_value if LOCALISED_VERSION_VALUES . include? ( key ) details . send ( key ) [ language ] = strip_value if LOCALISED_APP_VALUES . include? ( key ) end end end non_localised_options . each do | key | current = options [ key ] . to_s . strip next unless current . to_s . length > 0 v . send ( "#{key}=" , current ) if NON_LOCALISED_VERSION_VALUES . include? ( key ) details . send ( "#{key}=" , current ) if NON_LOCALISED_APP_VALUES . include? ( key ) end v . release_on_approval = options [ :automatic_release ] v . auto_release_date = options [ :auto_release_date ] unless options [ :auto_release_date ] . nil? v . toggle_phased_release ( enabled : ! ! options [ :phased_release ] ) unless options [ :phased_release ] . nil? set_trade_representative_contact_information ( v , options ) set_review_information ( v , options ) set_app_rating ( v , options ) v . ratings_reset = options [ :reset_ratings ] unless options [ :reset_ratings ] . nil? Helper . show_loading_indicator ( "Uploading metadata to App Store Connect" ) v . save! Helper . hide_loading_indicator begin details . save! UI . success ( "Successfully uploaded set of metadata to App Store Connect" ) rescue Spaceship :: TunesClient :: ITunesConnectError => e if e . message . include? ( 'App Name cannot be longer than 50 characters' ) || e . message . include? ( 'The app name you entered is already being used' ) UI . error ( "Error in app name. Try using 'individual_metadata_items' to identify the problem language." ) UI . user_error! ( e . message ) else raise e end end end | Make sure to call load_from_filesystem before calling upload |
314 | def upload_individual_item ( app , version , language , key , value ) details = app . details version . send ( key ) [ language ] = value if LOCALISED_VERSION_VALUES . include? ( key ) details . send ( key ) [ language ] = value if LOCALISED_APP_VALUES . include? ( key ) Helper . show_loading_indicator ( "Uploading #{language} #{key} to App Store Connect" ) version . save! Helper . hide_loading_indicator begin details . save! UI . success ( "Successfully uploaded #{language} #{key} to App Store Connect" ) rescue Spaceship :: TunesClient :: ITunesConnectError => e UI . error ( "Error in #{language} #{key}: \n#{value}" ) UI . error ( e . message ) end end | Uploads metadata individually by language to help identify exactly which items have issues |
315 | def verify_available_languages! ( options ) return if options [ :skip_metadata ] v = options [ :app ] . edit_version ( platform : options [ :platform ] ) UI . user_error! ( "Could not find a version to edit for app '#{options[:app].name}', the app metadata is read-only currently" ) unless v enabled_languages = options [ :languages ] || [ ] LOCALISED_VERSION_VALUES . each do | key | current = options [ key ] next unless current && current . kind_of? ( Hash ) current . each do | language , value | language = language . to_s enabled_languages << language unless enabled_languages . include? ( language ) end end enabled_languages = enabled_languages . reject do | lang | lang == "default" end . uniq if enabled_languages . count > 0 v . create_languages ( enabled_languages ) lng_text = "language" lng_text += "s" if enabled_languages . count != 1 Helper . show_loading_indicator ( "Activating #{lng_text} #{enabled_languages.join(', ')}..." ) v . save! Helper . hide_loading_indicator end true end | Makes sure all languages we need are actually created |
316 | def load_from_filesystem ( options ) return if options [ :skip_metadata ] ignore_validation = options [ :ignore_language_directory_validation ] Loader . language_folders ( options [ :metadata_path ] , ignore_validation ) . each do | lang_folder | language = File . basename ( lang_folder ) ( LOCALISED_VERSION_VALUES + LOCALISED_APP_VALUES ) . each do | key | path = File . join ( lang_folder , "#{key}.txt" ) next unless File . exist? ( path ) UI . message ( "Loading '#{path}'..." ) options [ key ] ||= { } options [ key ] [ language ] ||= File . read ( path ) end end ( NON_LOCALISED_VERSION_VALUES + NON_LOCALISED_APP_VALUES ) . each do | key | path = File . join ( options [ :metadata_path ] , "#{key}.txt" ) next unless File . exist? ( path ) UI . message ( "Loading '#{path}'..." ) options [ key ] ||= File . read ( path ) end options [ :trade_representative_contact_information ] ||= { } TRADE_REPRESENTATIVE_CONTACT_INFORMATION_VALUES . values . each do | option_name | path = File . join ( options [ :metadata_path ] , TRADE_REPRESENTATIVE_CONTACT_INFORMATION_DIR , "#{option_name}.txt" ) next unless File . exist? ( path ) next if options [ :trade_representative_contact_information ] [ option_name ] . to_s . length > 0 UI . message ( "Loading '#{path}'..." ) options [ :trade_representative_contact_information ] [ option_name ] ||= File . read ( path ) end options [ :app_review_information ] ||= { } REVIEW_INFORMATION_VALUES . values . each do | option_name | path = File . join ( options [ :metadata_path ] , REVIEW_INFORMATION_DIR , "#{option_name}.txt" ) next unless File . exist? ( path ) next if options [ :app_review_information ] [ option_name ] . to_s . length > 0 UI . message ( "Loading '#{path}'..." ) options [ :app_review_information ] [ option_name ] ||= File . read ( path ) end end | Loads the metadata files and stores them into the options object |
317 | def normalize_language_keys ( options ) ( LOCALISED_VERSION_VALUES + LOCALISED_APP_VALUES ) . each do | key | current = options [ key ] next unless current && current . kind_of? ( Hash ) current . keys . each do | language | current [ language . to_s ] = current . delete ( language ) end end options end | Normalizes languages keys from symbols to strings |
318 | def resolution ( path ) return FastImage . size ( path ) if content_type ( path ) . start_with? ( "image" ) return video_resolution ( path ) if content_type ( path ) . start_with? ( "video" ) raise "Cannot find resolution of file #{path}" end | Identifies the resolution of a video or an image . Supports all video and images required by DU - UTC right now |
319 | def run_action_requiring_special_handling ( command : nil , parameter_map : nil , action_return_type : nil ) action_return = nil closure_argument_value = nil case command . method_name when "sh" error_callback = proc { | string_value | closure_argument_value = string_value } command_param = parameter_map [ :command ] log_param = parameter_map [ :log ] action_return = Fastlane :: FastFile . sh ( command_param , log : log_param , error_callback : error_callback ) end command_return = ActionCommandReturn . new ( return_value : action_return , return_value_type : action_return_type , closure_argument_value : closure_argument_value ) return command_return end | Some actions have special handling in fast_file . rb that means we can t directly call the action but we have to use the same logic that is in fast_file . rb instead . That s where this switch statement comes into play |
320 | def build_app command = BuildCommandGenerator . generate print_command ( command , "Generated Build Command" ) if FastlaneCore :: Globals . verbose? FastlaneCore :: CommandExecutor . execute ( command : command , print_all : true , print_command : ! Gym . config [ :silent ] , error : proc do | output | ErrorHandler . handle_build_error ( output ) end ) mark_archive_as_built_by_gym ( BuildCommandGenerator . archive_path ) UI . success ( "Successfully stored the archive. You can find it in the Xcode Organizer." ) unless Gym . config [ :archive_path ] . nil? UI . verbose ( "Stored the archive in: " + BuildCommandGenerator . archive_path ) post_build_app end | Builds the app and prepares the archive |
321 | def post_build_app command = BuildCommandGenerator . post_build return if command . empty? print_command ( command , "Generated Post-Build Command" ) if FastlaneCore :: Globals . verbose? FastlaneCore :: CommandExecutor . execute ( command : command , print_all : true , print_command : ! Gym . config [ :silent ] , error : proc do | output | ErrorHandler . handle_build_error ( output ) end ) end | Post - processing of build_app |
322 | def move_ipa FileUtils . mv ( PackageCommandGenerator . ipa_path , File . expand_path ( Gym . config [ :output_directory ] ) , force : true ) ipa_path = File . expand_path ( File . join ( Gym . config [ :output_directory ] , File . basename ( PackageCommandGenerator . ipa_path ) ) ) UI . success ( "Successfully exported and signed the ipa file:" ) UI . message ( ipa_path ) ipa_path end | Moves over the binary and dsym file to the output directory |
323 | def copy_mac_app exe_name = Gym . project . build_settings ( key : "EXECUTABLE_NAME" ) app_path = File . join ( BuildCommandGenerator . archive_path , "Products/Applications/#{exe_name}.app" ) UI . crash! ( "Couldn't find application in '#{BuildCommandGenerator.archive_path}'" ) unless File . exist? ( app_path ) FileUtils . cp_r ( app_path , File . expand_path ( Gym . config [ :output_directory ] ) , remove_destination : true ) app_path = File . join ( Gym . config [ :output_directory ] , File . basename ( app_path ) ) UI . success ( "Successfully exported the .app file:" ) UI . message ( app_path ) app_path end | Copies the . app from the archive into the output directory |
324 | def move_manifest if File . exist? ( PackageCommandGenerator . manifest_path ) FileUtils . mv ( PackageCommandGenerator . manifest_path , File . expand_path ( Gym . config [ :output_directory ] ) , force : true ) manifest_path = File . join ( File . expand_path ( Gym . config [ :output_directory ] ) , File . basename ( PackageCommandGenerator . manifest_path ) ) UI . success ( "Successfully exported the manifest.plist file:" ) UI . message ( manifest_path ) manifest_path end end | Move the manifest . plist if exists into the output directory |
325 | def move_app_thinning if File . exist? ( PackageCommandGenerator . app_thinning_path ) FileUtils . mv ( PackageCommandGenerator . app_thinning_path , File . expand_path ( Gym . config [ :output_directory ] ) , force : true ) app_thinning_path = File . join ( File . expand_path ( Gym . config [ :output_directory ] ) , File . basename ( PackageCommandGenerator . app_thinning_path ) ) UI . success ( "Successfully exported the app-thinning.plist file:" ) UI . message ( app_thinning_path ) app_thinning_path end end | Move the app - thinning . plist file into the output directory |
326 | def move_app_thinning_size_report if File . exist? ( PackageCommandGenerator . app_thinning_size_report_path ) FileUtils . mv ( PackageCommandGenerator . app_thinning_size_report_path , File . expand_path ( Gym . config [ :output_directory ] ) , force : true ) app_thinning_size_report_path = File . join ( File . expand_path ( Gym . config [ :output_directory ] ) , File . basename ( PackageCommandGenerator . app_thinning_size_report_path ) ) UI . success ( "Successfully exported the App Thinning Size Report.txt file:" ) UI . message ( app_thinning_size_report_path ) app_thinning_size_report_path end end | Move the App Thinning Size Report . txt file into the output directory |
327 | def move_apps_folder if Dir . exist? ( PackageCommandGenerator . apps_path ) FileUtils . mv ( PackageCommandGenerator . apps_path , File . expand_path ( Gym . config [ :output_directory ] ) , force : true ) apps_path = File . join ( File . expand_path ( Gym . config [ :output_directory ] ) , File . basename ( PackageCommandGenerator . apps_path ) ) UI . success ( "Successfully exported Apps folder:" ) UI . message ( apps_path ) apps_path end end | Move the Apps folder to the output directory |
328 | def determine_api_version ( new_file_content : nil , old_file_content : nil ) unless old_file_content . length >= new_file_content . length old_api_version = find_api_version_string ( content : old_file_content ) return DEFAULT_API_VERSION_STRING if old_api_version . nil? return increment_api_version_string ( api_version_string : old_api_version ) end relevant_old_file_content = old_file_content [ 0 .. ( new_file_content . length - 1 ) ] if relevant_old_file_content == new_file_content return find_api_version_string ( content : old_file_content ) else old_api_version = find_api_version_string ( content : old_file_content ) return DEFAULT_API_VERSION_STRING if old_api_version . nil? return increment_api_version_string ( api_version_string : old_api_version ) end end | compares the new file content to the old and figures out what api_version the new content should be |
329 | def increment_api_version_string ( api_version_string : nil , increment_by : :patch ) versions = api_version_string . split ( "." ) major = versions [ 0 ] . to_i minor = versions [ 1 ] . to_i patch = versions [ 2 ] . to_i case increment_by when :patch patch += 1 when :minor minor += 1 patch = 0 when :major major += 1 minor = 0 patch = 0 end new_version_string = [ major , minor , patch ] . join ( "." ) return new_version_string end | expects format to be X . Y . Z where each value is a number |
330 | def for_lane ( lane_name ) if lane_name . to_s . split ( " " ) . count > 1 puts ( "You use deprecated syntax '#{lane_name}' in your Appfile." . yellow ) puts ( "Please follow the Appfile guide: https://docs.fastlane.tools/advanced/#appfile" . yellow ) platform , lane_name = lane_name . split ( " " ) return unless platform == ENV [ "FASTLANE_PLATFORM_NAME" ] end if ENV [ "FASTLANE_LANE_NAME" ] == lane_name . to_s yield end end | Override Appfile configuration for a specific lane . |
331 | def file_needs_update? ( filename : nil ) regex_to_use = API_VERSION_REGEX source = File . join ( self . source_swift_code_file_folder_path , "/#{filename}" ) target = File . join ( self . target_swift_code_file_folder_path , "/#{filename}" ) return true unless File . exist? ( target ) source_file_content = File . read ( source ) target_file_content = File . read ( target ) bundled_version = source_file_content . match ( regex_to_use ) [ 1 ] target_version = target_file_content . match ( regex_to_use ) [ 1 ] file_versions_are_different = bundled_version != target_version UI . verbose ( "#{filename} FastlaneRunnerAPIVersion (bundled/target): #{bundled_version}/#{target_version}" ) files_are_different = source_file_content != target_file_content if files_are_different && ! file_versions_are_different UI . verbose ( "File versions are the same, but the two files are not equal, so that's a problem, setting needs update to 'true'" ) end needs_update = file_versions_are_different || files_are_different return needs_update end | compares source file against the target file s FastlaneRunnerAPIVersion and returned true if there is a difference |
332 | def copy_file_if_needed! ( filename : nil , dry_run : false ) needs_update = file_needs_update? ( filename : filename ) UI . verbose ( "file #{filename} needs an update" ) if needs_update return needs_update if dry_run unless needs_update return false end source = File . join ( self . source_swift_code_file_folder_path , "/#{filename}" ) target = File . join ( self . target_swift_code_file_folder_path , "/#{filename}" ) FileUtils . cp ( source , target ) UI . verbose ( "Copied #{source} to #{target}" ) return true end | currently just copies file even if not needed . |
333 | def serve @server_transport . listen begin loop do @thread_q . push ( :token ) Thread . new do begin loop do client = @server_transport . accept trans = @transport_factory . get_transport ( client ) prot = @protocol_factory . get_protocol ( trans ) begin loop do @processor . process ( prot , prot ) end rescue Thrift :: TransportException , Thrift :: ProtocolException => e ensure trans . close end end rescue => e @exception_q . push ( e ) ensure @thread_q . pop end end end ensure @server_transport . close end end | exceptions that happen in worker threads simply cause that thread to die and another to be spawned in its place . |
334 | def write_json_char ( ch ) kJSONCharTable = [ 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 'b' , 't' , 'n' , 0 , 'f' , 'r' , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 1 , 1 , '"' , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , ] ch_value = ch [ 0 ] if ( ch_value . kind_of? String ) ch_value = ch . bytes . first end if ( ch_value >= 0x30 ) if ( ch == @@kJSONBackslash ) trans . write ( @@kJSONBackslash ) trans . write ( @@kJSONBackslash ) else trans . write ( ch ) end else outCh = kJSONCharTable [ ch_value ] ; if outCh . kind_of? String trans . write ( @@kJSONBackslash ) trans . write ( outCh ) elsif outCh == 1 trans . write ( ch ) else write_json_escape_char ( ch ) end end end | Write the character ch as part of a JSON string escaping as appropriate . |
335 | def write_json_string ( str ) @context . write ( trans ) trans . write ( @@kJSONStringDelimiter ) str . split ( '' ) . each do | ch | write_json_char ( ch ) end trans . write ( @@kJSONStringDelimiter ) end | Write out the contents of the string str as a JSON string escaping characters as appropriate . |
336 | def write_json_base64 ( str ) @context . write ( trans ) trans . write ( @@kJSONStringDelimiter ) trans . write ( Base64 . strict_encode64 ( str ) ) trans . write ( @@kJSONStringDelimiter ) end | Write out the contents of the string as JSON string base64 - encoding the string s contents and escaping as appropriate |
337 | def write_json_double ( num ) @context . write ( trans ) special = false ; if ( num . nan? ) special = true ; val = @@kThriftNan ; elsif ( num . infinite? ) special = true ; val = @@kThriftInfinity ; if ( num < 0.0 ) val = @@kThriftNegativeInfinity ; end else val = num . to_s end escapeNum = special || @context . escapeNum if ( escapeNum ) trans . write ( @@kJSONStringDelimiter ) end trans . write ( val ) if ( escapeNum ) trans . write ( @@kJSONStringDelimiter ) end end | Convert the given double to a JSON string which is either the number NaN or Infinity or - Infinity . |
338 | def read_json_escape_char str = @reader . read str += @reader . read str += @reader . read str += @reader . read if RUBY_VERSION >= '1.9' str . hex . chr ( Encoding :: UTF_8 ) else str . hex . chr end end | Decodes the four hex parts of a JSON escaped string character and returns the character via out . |
339 | def read_json_string ( skipContext = false ) escape_chars = "\"\\/bfnrt" escape_char_vals = [ "\"" , "\\" , "\/" , "\b" , "\f" , "\n" , "\r" , "\t" , ] if ! skipContext @context . read ( @reader ) end read_json_syntax_char ( @@kJSONStringDelimiter ) ch = "" str = "" while ( true ) ch = @reader . read if ( ch == @@kJSONStringDelimiter ) break end if ( ch == @@kJSONBackslash ) ch = @reader . read if ( ch == 'u' ) ch = read_json_escape_char else pos = escape_chars . index ( ch ) ; if ( pos . nil? ) raise ProtocolException . new ( ProtocolException :: INVALID_DATA , "Expected control char, got \'#{ch}\'." ) end ch = escape_char_vals [ pos ] end end str += ch end return str end | Decodes a JSON string including unescaping and returns the string via str |
340 | def read_json_base64 str = read_json_string m = str . length % 4 if m != 0 ( 4 - m ) . times do str += '=' end end Base64 . strict_decode64 ( str ) end | Reads a block of base64 characters decoding it and returns via str |
341 | def read_json_numeric_chars str = "" while ( true ) ch = @reader . peek if ( ! is_json_numeric ( ch ) ) break ; end ch = @reader . read str += ch end return str end | Reads a sequence of characters stopping at the first one that is not a valid JSON numeric character . |
342 | def read_json_integer @context . read ( @reader ) if ( @context . escapeNum ) read_json_syntax_char ( @@kJSONStringDelimiter ) end str = read_json_numeric_chars begin num = Integer ( str ) ; rescue raise ProtocolException . new ( ProtocolException :: INVALID_DATA , "Expected numeric value; got \"#{str}\"" ) end if ( @context . escapeNum ) read_json_syntax_char ( @@kJSONStringDelimiter ) end return num end | Reads a sequence of characters and assembles them into a number returning them via num |
343 | def read_json_double @context . read ( @reader ) num = 0 if ( @reader . peek == @@kJSONStringDelimiter ) str = read_json_string ( true ) if ( str == @@kThriftNan ) num = ( + 1.0 / 0.0 ) / ( + 1.0 / 0.0 ) elsif ( str == @@kThriftInfinity ) num = + 1.0 / 0.0 elsif ( str == @@kThriftNegativeInfinity ) num = - 1.0 / 0.0 else if ( ! @context . escapeNum ) raise ProtocolException . new ( ProtocolException :: INVALID_DATA , "Numeric data unexpectedly quoted" ) end begin num = Float ( str ) rescue raise ProtocolException . new ( ProtocolException :: INVALID_DATA , "Expected numeric value; got \"#{str}\"" ) end end else if ( @context . escapeNum ) read_json_syntax_char ( @@kJSONStringDelimiter ) end str = read_json_numeric_chars begin num = Float ( str ) rescue raise ProtocolException . new ( ProtocolException :: INVALID_DATA , "Expected numeric value; got \"#{str}\"" ) end end return num end | Reads a JSON number or string and interprets it as a double . |
344 | def write_field ( * args ) if args . size == 3 field_info = args [ 0 ] fid = args [ 1 ] value = args [ 2 ] elsif args . size == 4 field_info = { :name => args [ 0 ] , :type => args [ 1 ] } fid = args [ 2 ] value = args [ 3 ] else raise ArgumentError , "wrong number of arguments (#{args.size} for 3)" end write_field_begin ( field_info [ :name ] , field_info [ :type ] , fid ) write_type ( field_info , value ) write_field_end end | Writes a field based on the field information field ID and value . |
345 | def write_type ( field_info , value ) if field_info . is_a? Fixnum field_info = { :type => field_info } end case field_info [ :type ] when Types :: BOOL write_bool ( value ) when Types :: BYTE write_byte ( value ) when Types :: DOUBLE write_double ( value ) when Types :: I16 write_i16 ( value ) when Types :: I32 write_i32 ( value ) when Types :: I64 write_i64 ( value ) when Types :: STRING if field_info [ :binary ] write_binary ( value ) else write_string ( value ) end when Types :: STRUCT value . write ( self ) else raise NotImplementedError end end | Writes a field value based on the field information . |
346 | def read_type ( field_info ) if field_info . is_a? Fixnum field_info = { :type => field_info } end case field_info [ :type ] when Types :: BOOL read_bool when Types :: BYTE read_byte when Types :: DOUBLE read_double when Types :: I16 read_i16 when Types :: I32 read_i32 when Types :: I64 read_i64 when Types :: STRING if field_info [ :binary ] read_binary else read_string end else raise NotImplementedError end end | Reads a field value based on the field information . |
347 | def write_field_begin_internal ( type , id , type_override = nil ) last_id = @last_field . pop typeToWrite = type_override || CompactTypes . get_compact_type ( type ) if id > last_id && id - last_id <= 15 write_byte ( ( id - last_id ) << 4 | typeToWrite ) else write_byte ( typeToWrite ) write_i16 ( id ) end @last_field . push ( id ) nil end | The workhorse of writeFieldBegin . It has the option of doing a type override of the type header . This is used specifically in the boolean field case . |
348 | def write_collection_begin ( elem_type , size ) if size <= 14 write_byte ( size << 4 | CompactTypes . get_compact_type ( elem_type ) ) else write_byte ( 0xf0 | CompactTypes . get_compact_type ( elem_type ) ) write_varint32 ( size ) end end | Abstract method for writing the start of lists and sets . List and sets on the wire differ only by the type indicator . |
349 | def read_into_buffer ( buffer , size ) i = 0 while i < size if @index >= @rbuf . size @rbuf = @transport . read ( DEFAULT_BUFFER ) @index = 0 end byte = Bytes . get_string_byte ( @rbuf , @index ) Bytes . set_string_byte ( buffer , i , byte ) @index += 1 i += 1 end i end | Reads a number of bytes from the transport into the buffer passed . |
350 | def fibonacci ( n ) seq = [ 1 , 1 ] 3 . upto ( n ) do seq << seq [ - 1 ] + seq [ - 2 ] end seq [ n - 1 ] end | 1 - based index into the fibonacci sequence |
351 | def flush return @transport . flush unless @write out = [ @wbuf . length ] . pack ( 'N' ) out << @wbuf @transport . write ( out ) @transport . flush @wbuf = Bytes . empty_byte_buffer end | Writes the output buffer to the stream in the format of a 4 - byte length followed by the actual data . |
352 | def to_uri ( uri , base_uri = nil ) case base_uri when nil , '' Utils . normalize_uri ( uri . to_s ) else Utils . normalize_uri ( base_uri ) + Utils . normalize_uri ( uri . to_s ) end rescue URI :: Error nil end | Parse an input into a URI object optionally resolving it against a base URI if given . |
353 | def to_xpath ( string ) subs = string . to_s . scan ( / \G \A \z / ) . map { | x | case x when / / %Q{'#{x}'} else %Q{"#{x}"} end } if subs . size == 1 subs . first else 'concat(' << subs . join ( ', ' ) << ')' end end | Escape a string for use in XPath expression |
354 | def store_payload! ( old_events , result ) case interpolated [ 'mode' ] . presence when 'on_change' result_json = result . to_json if found = old_events . find { | event | event . payload . to_json == result_json } found . update! ( expires_at : new_event_expiration_date ) false else true end when 'all' , 'merge' , '' true else raise "Illegal options[mode]: #{interpolated['mode']}" end end | This method returns true if the result should be stored as a new event . If mode is set to on_change this method may return false and update an existing event to expire further in the future . |
355 | def run par = false if @allow_parallel par = true @logger . info ( "Enabling parallelization by default." ) end if par @actions . each do | machine , _ , _ | if ! machine . provider_options [ :parallel ] @logger . info ( "Disabling parallelization because provider doesn't support it: #{machine.provider_name}" ) par = false break end end end if par && @actions . length <= 1 @logger . info ( "Disabling parallelization because only executing one action" ) par = false end @logger . info ( "Batch action will parallelize: #{par.inspect}" ) threads = [ ] @actions . each do | machine , action , options | @logger . info ( "Starting action: #{machine} #{action} #{options}" ) thread = Thread . new do Thread . current [ :error ] = nil start_pid = Process . pid begin if action . is_a? ( Proc ) action . call ( machine ) else machine . send ( :action , action , options ) end rescue Exception => e raise if ! par && Process . pid == start_pid Thread . current [ :error ] = e if Process . pid == start_pid machine . ui . error ( I18n . t ( "vagrant.general.batch_notify_error" ) ) end end if Process . pid != start_pid exit_status = true if Thread . current [ :error ] exit_status = false error = Thread . current [ :error ] @logger . error ( error . inspect ) @logger . error ( error . message ) @logger . error ( error . backtrace . join ( "\n" ) ) end Process . exit! ( exit_status ) end end thread [ :machine ] = machine if ! par thread . join ( THREAD_MAX_JOIN_TIMEOUT ) while thread . alive? end threads << thread end errors = [ ] threads . each do | thread | thread . join ( THREAD_MAX_JOIN_TIMEOUT ) while thread . alive? if thread [ :error ] e = thread [ :error ] if ! thread [ :error ] . is_a? ( Errors :: VagrantError ) e = thread [ :error ] message = e . message message += "\n" message += "\n#{e.backtrace.join("\n")}" errors << I18n . t ( "vagrant.general.batch_unexpected_error" , machine : thread [ :machine ] . name , message : message ) else errors << I18n . t ( "vagrant.general.batch_vagrant_error" , machine : thread [ :machine ] . name , message : thread [ :error ] . message ) end end end if ! errors . empty? raise Errors :: BatchMultiError , message : errors . join ( "\n\n" ) end end | Run all the queued up actions parallelizing if possible . |
356 | def get ( key ) return nil if ! @items . key? ( key ) return @results_cache [ key ] if @results_cache . key? ( key ) @results_cache [ key ] = @items [ key ] . call end | Get a value by the given key . |
357 | def merge ( other ) self . class . new . tap do | result | result . merge! ( self ) result . merge! ( other ) end end | Merge one registry with another and return a completely new registry . Note that the result cache is completely busted so any gets on the new registry will result in a cache miss . |
358 | def init! ( plugins , repair = false ) if ! @initial_specifications @initial_specifications = Gem :: Specification . find_all { true } else Gem :: Specification . all = @initial_specifications Gem :: Specification . reset end if ! Gem . sources . include? ( HASHICORP_GEMSTORE ) current_sources = Gem . sources . sources . dup Gem . sources . clear Gem . sources << HASHICORP_GEMSTORE current_sources . each do | src | Gem . sources << src end end plugin_deps = plugins . map do | name , info | Gem :: Dependency . new ( name , info [ 'installed_gem_version' ] . to_s . empty? ? '> 0' : info [ 'installed_gem_version' ] ) end @logger . debug ( "Current generated plugin dependency list: #{plugin_deps}" ) request_set = Gem :: RequestSet . new ( * plugin_deps ) request_set . remote = false repair_result = nil begin composed_set = generate_vagrant_set solution = request_set . resolve ( composed_set ) rescue Gem :: UnsatisfiableDependencyError => failure if repair raise failure if @init_retried @logger . debug ( "Resolution failed but attempting to repair. Failure: #{failure}" ) install ( plugins ) @init_retried = true retry else raise end end activate_solution ( solution ) full_vagrant_spec_list = @initial_specifications + solution . map ( & :full_spec ) if ( defined? ( :: Bundler ) ) @logger . debug ( "Updating Bundler with full specification list" ) :: Bundler . rubygems . replace_entrypoints ( full_vagrant_spec_list ) end Gem . post_reset do Gem :: Specification . all = full_vagrant_spec_list end Gem :: Specification . reset nil end | Initializes Bundler and the various gem paths so that we can begin loading gems . |
359 | def update ( plugins , specific , ** opts ) specific ||= [ ] update = opts . merge ( { gems : specific . empty? ? true : specific } ) internal_install ( plugins , update ) end | Update updates the given plugins or every plugin if none is given . |
360 | def clean ( plugins , ** opts ) @logger . debug ( "Cleaning Vagrant plugins of stale gems." ) plugin_deps = plugins . map do | name , info | gem_version = info [ 'installed_gem_version' ] gem_version = info [ 'gem_version' ] if gem_version . to_s . empty? gem_version = "> 0" if gem_version . to_s . empty? Gem :: Dependency . new ( name , gem_version ) end @logger . debug ( "Current plugin dependency list: #{plugin_deps}" ) request_set = Gem :: RequestSet . new ( * plugin_deps ) request_set . remote = false current_set = generate_vagrant_set plugin_specs = Dir . glob ( plugin_gem_path . join ( 'specifications/*.gemspec' ) . to_s ) . map do | spec_path | Gem :: Specification . load ( spec_path ) end if env_plugin_gem_path plugin_specs += Dir . glob ( env_plugin_gem_path . join ( 'specifications/*.gemspec' ) . to_s ) . map do | spec_path | Gem :: Specification . load ( spec_path ) end end @logger . debug ( "Generating current plugin state solution set." ) solution = request_set . resolve ( current_set ) solution_specs = solution . map ( & :full_spec ) solution_full_names = solution_specs . map ( & :full_name ) plugin_specs . delete_if do | spec | solution_full_names . include? ( spec . full_name ) end if env_plugin_gem_path if opts [ :env_local ] @logger . debug ( "Removing specifications that are not environment local" ) plugin_specs . delete_if do | spec | spec . full_gem_path . to_s . include? ( plugin_gem_path . realpath . to_s ) end else @logger . debug ( "Removing specifications that are environment local" ) plugin_specs . delete_if do | spec | spec . full_gem_path . to_s . include? ( env_plugin_gem_path . realpath . to_s ) end end end @logger . debug ( "Specifications to be removed - #{plugin_specs.map(&:full_name)}" ) plugin_specs . each do | spec | @logger . debug ( "Uninstalling gem - #{spec.full_name}" ) Gem :: Uninstaller . new ( spec . name , version : spec . version , install_dir : plugin_gem_path , all : true , executables : true , force : true , ignore : true , ) . uninstall_gem ( spec ) end solution . find_all do | spec | plugins . keys . include? ( spec . name ) end end | Clean removes any unused gems . |
361 | def validate_configured_sources! Gem . sources . each_source do | src | begin src . load_specs ( :released ) rescue Gem :: Exception => source_error if ENV [ "VAGRANT_ALLOW_PLUGIN_SOURCE_ERRORS" ] @logger . warn ( "Failed to load configured plugin source: #{src}!" ) @logger . warn ( "Error received attempting to load source (#{src}): #{source_error}" ) @logger . warn ( "Ignoring plugin source load failure due user request via env variable" ) else @logger . error ( "Failed to load configured plugin source `#{src}`: #{source_error}" ) raise Vagrant :: Errors :: PluginSourceError , source : src . uri . to_s , error_msg : source_error . message end end end end | Iterates each configured RubyGem source to validate that it is properly available . If source is unavailable an exception is raised . |
362 | def generate_builtin_set ( system_plugins = [ ] ) builtin_set = BuiltinSet . new @logger . debug ( "Generating new builtin set instance." ) vagrant_internal_specs . each do | spec | if ! system_plugins . include? ( spec . name ) builtin_set . add_builtin_spec ( spec ) end end builtin_set end | Generate the builtin resolver set |
363 | def activate_solution ( solution ) retried = false begin @logger . debug ( "Activating solution set: #{solution.map(&:full_name)}" ) solution . each do | activation_request | unless activation_request . full_spec . activated? @logger . debug ( "Activating gem #{activation_request.full_spec.full_name}" ) activation_request . full_spec . activate if ( defined? ( :: Bundler ) ) @logger . debug ( "Marking gem #{activation_request.full_spec.full_name} loaded within Bundler." ) :: Bundler . rubygems . mark_loaded activation_request . full_spec end end end rescue Gem :: LoadError => e if retried @logger . error ( "Failed to load solution set - #{e.class}: #{e}" ) matcher = e . message . match ( / / ) if matcher && ! matcher [ "gem_name" ] . empty? desired_activation_request = solution . detect do | request | request . name == matcher [ "gem_name" ] end if desired_activation_request && ! desired_activation_request . full_spec . activated? activation_request = desired_activation_request @logger . warn ( "Found misordered activation request for #{desired_activation_request.full_name}. Moving to solution HEAD." ) solution . delete ( desired_activation_request ) solution . unshift ( desired_activation_request ) retry end end raise else @logger . debug ( "Failed to load solution set. Retrying with reverse order." ) retried = true solution . reverse! retry end end end | Activate a given solution |
364 | def delete ( entry ) return true if ! entry . id @lock . synchronize do with_index_lock do return true if ! @machines [ entry . id ] if ! @machine_locks [ entry . id ] raise "Unlocked delete on machine: #{entry.id}" end unlocked_reload @machines . delete ( entry . id ) unlocked_save unlocked_release ( entry . id ) end end true end | Initializes a MachineIndex at the given file location . |
365 | def find_by_prefix ( prefix ) @machines . each do | uuid , data | return data . merge ( "id" => uuid ) if uuid . start_with? ( prefix ) end nil end | Finds a machine where the UUID is prefixed by the given string . |
366 | def lock_machine ( uuid ) lock_path = @data_dir . join ( "#{uuid}.lock" ) lock_file = lock_path . open ( "w+" ) if lock_file . flock ( File :: LOCK_EX | File :: LOCK_NB ) === false lock_file . close lock_file = nil end lock_file end | Locks a machine exclusively to us returning the file handle that holds the lock . |
367 | def unlocked_release ( id ) lock_file = @machine_locks [ id ] if lock_file lock_file . close begin File . delete ( lock_file . path ) rescue Errno :: EACCES end @machine_locks . delete ( id ) end end | Releases a local lock on a machine . This does not acquire any locks so make sure to lock around it . |
368 | def unlocked_reload return if ! @index_file . file? data = nil begin data = JSON . load ( @index_file . read ) rescue JSON :: ParserError raise Errors :: CorruptMachineIndex , path : @index_file . to_s end if data if ! data [ "version" ] || data [ "version" ] . to_i != 1 raise Errors :: CorruptMachineIndex , path : @index_file . to_s end @machines = data [ "machines" ] || { } end end | This will reload the data without locking the index . It is assumed the caller with lock the index outside of this call . |
369 | def with_index_lock lock_path = "#{@index_file}.lock" File . open ( lock_path , "w+" ) do | f | f . flock ( File :: LOCK_EX ) yield end end | This will hold a lock to the index so it can be read or updated . |
370 | def version ( version , ** opts ) requirements = version . split ( "," ) . map do | v | Gem :: Requirement . new ( v . strip ) end providers = nil providers = Array ( opts [ :provider ] ) . map ( & :to_sym ) if opts [ :provider ] @version_map . keys . sort . reverse . each do | v | next if ! requirements . all? { | r | r . satisfied_by? ( v ) } version = Version . new ( @version_map [ v ] ) next if ( providers & version . providers ) . empty? if providers return version end nil end | Loads the metadata associated with the box from the given IO . |
371 | def help opts = OptionParser . new do | o | o . banner = "Usage: vagrant [options] <command> [<args>]" o . separator "" o . on ( "-v" , "--version" , "Print the version and exit." ) o . on ( "-h" , "--help" , "Print this help." ) o . separator "" o . separator "Common commands:" commands = { } longest = 0 Vagrant . plugin ( "2" ) . manager . commands . each do | key , data | next if ! data [ 1 ] [ :primary ] key = key . to_s klass = data [ 0 ] . call commands [ key ] = klass . synopsis longest = key . length if key . length > longest end commands . keys . sort . each do | key | o . separator " #{key.ljust(longest+2)} #{commands[key]}" @env . ui . machine ( "cli-command" , key . dup ) end o . separator "" o . separator "For help on any individual command run `vagrant COMMAND -h`" o . separator "" o . separator "Additional subcommands are available, but are either more advanced" o . separator "or not commonly used. To see all subcommands, run the command" o . separator "`vagrant list-commands`." end @env . ui . info ( opts . help , prefix : false ) end | This prints out the help for the CLI . |
372 | def action_runner @action_runner ||= Action :: Runner . new do { action_runner : action_runner , box_collection : boxes , hook : method ( :hook ) , host : host , machine_index : machine_index , gems_path : gems_path , home_path : home_path , root_path : root_path , tmp_path : tmp_path , ui : @ui , env : self } end end | Action runner for executing actions in the context of this environment . |
373 | def active_machines return [ ] if ! @local_data_path machine_folder = @local_data_path . join ( "machines" ) return [ ] if ! machine_folder . directory? result = [ ] machine_folder . children ( true ) . each do | name_folder | next if ! name_folder . directory? name = name_folder . basename . to_s . to_sym name_folder . children ( true ) . each do | provider_folder | next if ! provider_folder . directory? next if ! provider_folder . join ( "id" ) . file? provider = provider_folder . basename . to_s . to_sym result << [ name , provider ] end end result end | Returns a list of machines that this environment is currently managing that physically have been created . |
374 | def batch ( parallel = true ) parallel = false if ENV [ "VAGRANT_NO_PARALLEL" ] @batch_lock . synchronize do BatchAction . new ( parallel ) . tap do | b | yield b b . run end end end | This creates a new batch action yielding it and then running it once the block is called . |
375 | def hook ( name , opts = nil ) @logger . info ( "Running hook: #{name}" ) opts ||= { } opts [ :callable ] ||= Action :: Builder . new opts [ :runner ] ||= action_runner opts [ :action_name ] = name opts [ :env ] = self opts . delete ( :runner ) . run ( opts . delete ( :callable ) , opts ) end | This defines a hook point where plugin action hooks that are registered against the given name will be run in the context of this environment . |
376 | def host return @host if defined? ( @host ) host_klass = vagrantfile . config . vagrant . host host_klass = nil if host_klass == :detect begin @host = Host . new ( host_klass , Vagrant . plugin ( "2" ) . manager . hosts , Vagrant . plugin ( "2" ) . manager . host_capabilities , self ) rescue Errors :: CapabilityHostNotDetected klass = Class . new ( Vagrant . plugin ( "2" , :host ) ) do def detect? ( env ) ; true ; end end hosts = { generic : [ klass , nil ] } host_caps = { } @host = Host . new ( :generic , hosts , host_caps , self ) rescue Errors :: CapabilityHostExplicitNotDetected => e raise Errors :: HostExplicitNotDetected , e . extra_data end end | Returns the host object associated with this environment . |
377 | def lock ( name = "global" , ** opts ) f = nil return if ! block_given? return yield if @locks [ name ] || opts [ :noop ] lock_path = data_dir . join ( "lock.#{name}.lock" ) @logger . debug ( "Attempting to acquire process-lock: #{name}" ) lock ( "dotlock" , noop : name == "dotlock" , retry : true ) do f = File . open ( lock_path , "w+" ) end while f . flock ( File :: LOCK_EX | File :: LOCK_NB ) === false @logger . warn ( "Process-lock in use: #{name}" ) if ! opts [ :retry ] raise Errors :: EnvironmentLockedError , name : name end sleep 0.2 end @logger . info ( "Acquired process lock: #{name}" ) result = nil begin @locks [ name ] = true result = yield ensure @locks . delete ( name ) @logger . info ( "Released process lock: #{name}" ) end if name != "dotlock" lock ( "dotlock" , retry : true ) do f . close begin File . delete ( lock_path ) rescue @logger . error ( "Failed to delete lock file #{lock_path} - some other thread " + "might be trying to acquire it. ignoring this error" ) end end end return result ensure begin f . close if f rescue IOError end end | This acquires a process - level lock with the given name . |
378 | def push ( name ) @logger . info ( "Getting push: #{name}" ) name = name . to_sym pushes = self . vagrantfile . config . push . __compiled_pushes if ! pushes . key? ( name ) raise Vagrant :: Errors :: PushStrategyNotDefined , name : name , pushes : pushes . keys end strategy , config = pushes [ name ] push_registry = Vagrant . plugin ( "2" ) . manager . pushes klass , _ = push_registry . get ( strategy ) if klass . nil? raise Vagrant :: Errors :: PushStrategyNotLoaded , name : strategy , pushes : push_registry . keys end klass . new ( self , config ) . push end | This executes the push with the given name raising any exceptions that occur . |
379 | def machine ( name , provider , refresh = false ) @logger . info ( "Getting machine: #{name} (#{provider})" ) cache_key = [ name , provider ] @machines ||= { } if refresh @logger . info ( "Refreshing machine (busting cache): #{name} (#{provider})" ) @machines . delete ( cache_key ) end if @machines . key? ( cache_key ) @logger . info ( "Returning cached machine: #{name} (#{provider})" ) return @machines [ cache_key ] end @logger . info ( "Uncached load of machine." ) machine_data_path = @local_data_path . join ( "machines/#{name}/#{provider}" ) @machines [ cache_key ] = vagrantfile . machine ( name , provider , boxes , machine_data_path , self ) end | This returns a machine with the proper provider for this environment . The machine named by name must be in this environment . |
380 | def setup_local_data_path ( force = false ) if @local_data_path . nil? @logger . warn ( "No local data path is set. Local data cannot be stored." ) return end @logger . info ( "Local data path: #{@local_data_path}" ) if @local_data_path . file? upgrade_v1_dotfile ( @local_data_path ) end return if ! force && root_path . nil? begin @logger . debug ( "Creating: #{@local_data_path}" ) FileUtils . mkdir_p ( @local_data_path ) loader_file = @local_data_path . join ( "rgloader" , "loader.rb" ) if ! loader_file . file? source_loader = Vagrant . source_root . join ( "templates/rgloader.rb" ) FileUtils . mkdir_p ( @local_data_path . join ( "rgloader" ) . to_s ) FileUtils . cp ( source_loader . to_s , loader_file . to_s ) end rescue Errno :: EACCES raise Errors :: LocalDataDirectoryNotAccessible , local_data_path : @local_data_path . to_s end end | This creates the local data directory and show an error if it couldn t properly be created . |
381 | def process_configured_plugins return if ! Vagrant . plugins_enabled? errors = vagrantfile . config . vagrant . validate ( nil ) if ! errors [ "vagrant" ] . empty? raise Errors :: ConfigInvalid , errors : Util :: TemplateRenderer . render ( "config/validation_failed" , errors : errors ) end installed = Plugin :: Manager . instance . installed_plugins needs_install = [ ] config_plugins = vagrantfile . config . vagrant . plugins config_plugins . each do | name , info | if ! installed [ name ] needs_install << name end end if ! needs_install . empty? ui . warn ( I18n . t ( "vagrant.plugins.local.uninstalled_plugins" , plugins : needs_install . sort . join ( ", " ) ) ) if ! Vagrant . auto_install_local_plugins? answer = nil until [ "y" , "n" ] . include? ( answer ) answer = ui . ask ( I18n . t ( "vagrant.plugins.local.request_plugin_install" ) + " [N]: " ) answer = answer . strip . downcase answer = "n" if answer . to_s . empty? end if answer == "n" raise Errors :: PluginMissingLocalError , plugins : needs_install . sort . join ( ", " ) end end needs_install . each do | name | pconfig = Util :: HashWithIndifferentAccess . new ( config_plugins [ name ] ) ui . info ( I18n . t ( "vagrant.commands.plugin.installing" , name : name ) ) options = { sources : Vagrant :: Bundler :: DEFAULT_GEM_SOURCES . dup , env_local : true } options [ :sources ] = pconfig [ :sources ] if pconfig [ :sources ] options [ :require ] = pconfig [ :entry_point ] if pconfig [ :entry_point ] options [ :version ] = pconfig [ :version ] if pconfig [ :version ] spec = Plugin :: Manager . instance . install_plugin ( name , options ) ui . info ( I18n . t ( "vagrant.commands.plugin.installed" , name : spec . name , version : spec . version . to_s ) ) end ui . info ( "\n" ) ui . warn ( I18n . t ( "vagrant.plugins.local.install_rerun_command" ) ) exit ( - 1 ) end Vagrant :: Plugin :: Manager . instance . local_file . installed_plugins end | Check for any local plugins defined within the Vagrantfile . If found validate they are available . If they are not available request to install them or raise an exception |
382 | def copy_insecure_private_key if ! @default_private_key_path . exist? @logger . info ( "Copying private key to home directory" ) source = File . expand_path ( "keys/vagrant" , Vagrant . source_root ) destination = @default_private_key_path begin FileUtils . cp ( source , destination ) rescue Errno :: EACCES raise Errors :: CopyPrivateKeyFailed , source : source , destination : destination end end if ! Util :: Platform . windows? if Util :: FileMode . from_octal ( @default_private_key_path . stat . mode ) != "600" @logger . info ( "Changing permissions on private key to 0600" ) @default_private_key_path . chmod ( 0600 ) end end end | This method copies the private key into the home directory if it doesn t already exist . |
383 | def find_vagrantfile ( search_path , filenames = nil ) filenames ||= [ "Vagrantfile" , "vagrantfile" ] filenames . each do | vagrantfile | current_path = search_path . join ( vagrantfile ) return current_path if current_path . file? end nil end | Finds the Vagrantfile in the given directory . |
384 | def upgrade_home_path_v1_1 if ! ENV [ "VAGRANT_UPGRADE_SILENT_1_5" ] @ui . ask ( I18n . t ( "vagrant.upgrading_home_path_v1_5" ) ) end collection = BoxCollection . new ( @home_path . join ( "boxes" ) , temp_dir_root : tmp_path ) collection . upgrade_v1_1_v1_5 end | This upgrades a home directory that was in the v1 . 1 format to the v1 . 5 format . It will raise exceptions if anything fails . |
385 | def upgrade_v1_dotfile ( path ) @logger . info ( "Upgrading V1 dotfile to V2 directory structure..." ) contents = path . read . strip if contents . strip == "" @logger . info ( "V1 dotfile was empty. Removing and moving on." ) path . delete return end @logger . debug ( "Attempting to parse JSON of V1 file" ) json_data = nil begin json_data = JSON . parse ( contents ) @logger . debug ( "JSON parsed successfully. Things are okay." ) rescue JSON :: ParserError raise Errors :: DotfileUpgradeJSONError , state_file : path . to_s end backup_file = path . dirname . join ( ".vagrant.v1.#{Time.now.to_i}" ) @logger . info ( "Renaming old dotfile to: #{backup_file}" ) path . rename ( backup_file ) setup_local_data_path ( true ) if json_data [ "active" ] @logger . debug ( "Upgrading to V2 style for each active VM" ) json_data [ "active" ] . each do | name , id | @logger . info ( "Upgrading dotfile: #{name} (#{id})" ) directory = @local_data_path . join ( "machines/#{name}/virtualbox" ) FileUtils . mkdir_p ( directory ) directory . join ( "id" ) . open ( "w+" ) do | f | f . write ( id ) end end end @ui . info ( I18n . t ( "vagrant.general.upgraded_v1_dotfile" , backup_path : backup_file . to_s ) ) end | This upgrades a Vagrant 1 . 0 . x dotfile to the new V2 format . |
386 | def detect! guest_name = @machine . config . vm . guest initialize_capabilities! ( guest_name , @guests , @capabilities , @machine ) rescue Errors :: CapabilityHostExplicitNotDetected => e raise Errors :: GuestExplicitNotDetected , value : e . extra_data [ :value ] rescue Errors :: CapabilityHostNotDetected raise Errors :: GuestNotDetected end | This will detect the proper guest OS for the machine and set up the class to actually execute capabilities . |
387 | def machine ( name , provider , boxes , data_path , env ) results = machine_config ( name , provider , boxes , data_path ) box = results [ :box ] config = results [ :config ] config_errors = results [ :config_errors ] config_warnings = results [ :config_warnings ] provider_cls = results [ :provider_cls ] provider_options = results [ :provider_options ] if ! config_warnings . empty? || ! config_errors . empty? level = config_errors . empty? ? :warn : :error output = Util :: TemplateRenderer . render ( "config/messages" , warnings : config_warnings , errors : config_errors ) . chomp env . ui . send ( level , I18n . t ( "vagrant.general.config_upgrade_messages" , name : name , output : output ) ) raise Errors :: ConfigUpgradeErrors if ! config_errors . empty? end provider_config = config . vm . get_provider_config ( provider ) FileUtils . mkdir_p ( data_path ) return Machine . new ( name , provider , provider_cls , provider_config , provider_options , config , data_path , box , env , self ) end | Initializes by loading a Vagrantfile . |
388 | def machine_names_and_options { } . tap do | r | @config . vm . defined_vms . each do | name , subvm | r [ name ] = subvm . options || { } end end end | Returns a list of the machine names as well as the options that were specified for that machine . |
389 | def primary_machine_name return machine_names . first if machine_names . length == 1 @config . vm . defined_vms . each do | name , subvm | return name if subvm . options [ :primary ] end nil end | Returns the name of the machine that is designated as the primary . |
390 | def action ( name , opts = nil ) @triggers . fire_triggers ( name , :before , @name . to_s , :action ) @logger . info ( "Calling action: #{name} on provider #{@provider}" ) opts ||= { } lock = true lock = opts . delete ( :lock ) if opts . key? ( :lock ) extra_env = opts . dup extra_env [ :trigger_env ] = @env check_cwd vf = nil vf = @env . vagrantfile_name [ 0 ] if @env . vagrantfile_name id = Digest :: MD5 . hexdigest ( "#{@env.root_path}#{vf}#{@env.local_data_path}#{@name}" ) locker = Proc . new { | * args , & block | block . call } locker = @env . method ( :lock ) if lock && ! name . to_s . start_with? ( "ssh" ) return_env = locker . call ( "machine-action-#{id}" ) do callable = @provider . action ( name ) if callable . nil? raise Errors :: UnimplementedProviderAction , action : name , provider : @provider . to_s end ui . machine ( "action" , name . to_s , "start" ) action_result = action_raw ( name , callable , extra_env ) ui . machine ( "action" , name . to_s , "end" ) action_result end @triggers . fire_triggers ( name , :after , @name . to_s , :action ) return return_env rescue Errors :: EnvironmentLockedError raise Errors :: MachineActionLockedError , action : name , name : @name end | Initialize a new machine . |
391 | def action_raw ( name , callable , extra_env = nil ) env = { action_name : "machine_action_#{name}" . to_sym , machine : self , machine_action : name , ui : @ui , } . merge ( extra_env || { } ) @env . action_runner . run ( callable , env ) end | This calls a raw callable in the proper context of the machine using the middleware stack . |
392 | def id = ( value ) @logger . info ( "New machine ID: #{value.inspect}" ) id_file = nil if @data_dir id_file = @data_dir . join ( "id" ) end if value if id_file id_file . open ( "w+" ) do | f | f . write ( value ) end end if uid_file uid_file . open ( "w+" ) do | f | f . write ( Process . uid . to_s ) end end if index_uuid . nil? entry = MachineIndex :: Entry . new entry . local_data_path = @env . local_data_path entry . name = @name . to_s entry . provider = @provider_name . to_s entry . state = "preparing" entry . vagrantfile_path = @env . root_path entry . vagrantfile_name = @env . vagrantfile_name if @box entry . extra_data [ "box" ] = { "name" => @box . name , "provider" => @box . provider . to_s , "version" => @box . version . to_s , } end entry = @env . machine_index . set ( entry ) @env . machine_index . release ( entry ) if @index_uuid_file @index_uuid_file . open ( "w+" ) do | f | f . write ( entry . id ) end end end else id_file . delete if id_file && id_file . file? uid_file . delete if uid_file && uid_file . file? uuid = index_uuid if uuid entry = @env . machine_index . get ( uuid ) @env . machine_index . delete ( entry ) if entry end if @data_dir @data_dir . children . each do | child | begin child . rmtree rescue Errno :: EACCES @logger . info ( "EACCESS deleting file: #{child}" ) end end end end @id = value . nil? ? nil : value . to_s @provider . machine_id_changed end | This sets the unique ID associated with this machine . This will persist this ID so that in the future Vagrant will be able to find this machine again . The unique ID must be absolutely unique to the virtual machine and can be used by providers for finding the actual machine associated with this instance . |
393 | def reload old_id = @id @id = nil if @data_dir id_file = @data_dir . join ( "id" ) id_content = id_file . read . strip if id_file . file? if ! id_content . to_s . empty? @id = id_content end end if @id != old_id && @provider @provider . machine_id_changed end @id end | This reloads the ID of the underlying machine . |
394 | def state result = @provider . state raise Errors :: MachineStateInvalid if ! result . is_a? ( MachineState ) uuid = index_uuid if uuid @state_mutex . synchronize do entry = @env . machine_index . get ( uuid ) if entry entry . state = result . short_description @env . machine_index . set ( entry ) @env . machine_index . release ( entry ) end end end result end | Returns the state of this machine . The state is queried from the backing provider so it can be any arbitrary symbol . |
395 | def check_cwd desired_encoding = @env . root_path . to_s . encoding vagrant_cwd_filepath = @data_dir . join ( 'vagrant_cwd' ) vagrant_cwd = if File . exist? ( vagrant_cwd_filepath ) File . read ( vagrant_cwd_filepath , external_encoding : desired_encoding ) . chomp end if ! File . identical? ( vagrant_cwd . to_s , @env . root_path . to_s ) if vagrant_cwd ui . warn ( I18n . t ( 'vagrant.moved_cwd' , old_wd : "#{vagrant_cwd}" , current_wd : "#{@env.root_path.to_s}" ) ) end File . write ( vagrant_cwd_filepath , @env . root_path . to_s , external_encoding : desired_encoding ) end end | Checks the current directory for a given machine and displays a warning if that machine has moved from its previous location on disk . If the machine has moved it prints a warning to the user . |
396 | def all results = [ ] with_collection_lock do @logger . debug ( "Finding all boxes in: #{@directory}" ) @directory . children ( true ) . each do | child | next if ! child . directory? box_name = undir_name ( child . basename . to_s ) child . children ( true ) . each do | versiondir | next if ! versiondir . directory? next if versiondir . basename . to_s . start_with? ( "." ) version = versiondir . basename . to_s versiondir . children ( true ) . each do | provider | if ! Gem :: Version . correct? ( version ) ui = Vagrant :: UI :: Prefixed . new ( Vagrant :: UI :: Colored . new , "vagrant" ) ui . warn ( I18n . t ( "vagrant.box_version_malformed" , version : version , box_name : box_name ) ) @logger . debug ( "Invalid version #{version} for box #{box_name}" ) next end if provider . directory? && provider . join ( "metadata.json" ) . file? provider_name = provider . basename . to_s . to_sym @logger . debug ( "Box: #{box_name} (#{provider_name}, #{version})" ) results << [ box_name , version , provider_name ] else @logger . debug ( "Invalid box #{box_name}, ignoring: #{provider}" ) end end end end end results . sort_by! do | box_result | [ box_result [ 0 ] , box_result [ 2 ] , Gem :: Version . new ( box_result [ 1 ] ) ] end results end | This returns an array of all the boxes on the system given by their name and their provider . |
397 | def find ( name , providers , version ) providers = Array ( providers ) requirements = version . to_s . split ( "," ) . map do | v | Gem :: Requirement . new ( v . strip ) end with_collection_lock do box_directory = @directory . join ( dir_name ( name ) ) if ! box_directory . directory? @logger . info ( "Box not found: #{name} (#{providers.join(", ")})" ) return nil end version_dir_map = { } versions = box_directory . children ( true ) . map do | versiondir | next if ! versiondir . directory? next if versiondir . basename . to_s . start_with? ( "." ) version = Gem :: Version . new ( versiondir . basename . to_s ) version_dir_map [ version . to_s ] = versiondir . basename . to_s version end . compact versions . sort . reverse . each do | v | if ! requirements . all? { | r | r . satisfied_by? ( v ) } next end versiondir = box_directory . join ( version_dir_map [ v . to_s ] ) providers . each do | provider | provider_dir = versiondir . join ( provider . to_s ) next if ! provider_dir . directory? @logger . info ( "Box found: #{name} (#{provider})" ) metadata_url = nil metadata_url_file = box_directory . join ( "metadata_url" ) metadata_url = metadata_url_file . read if metadata_url_file . file? if metadata_url && @hook hook_env = @hook . call ( :authenticate_box_url , box_urls : [ metadata_url ] ) metadata_url = hook_env [ :box_urls ] . first end return Box . new ( name , provider , version_dir_map [ v . to_s ] , provider_dir , metadata_url : metadata_url , ) end end end nil end | Find a box in the collection with the given name and provider . |
398 | def upgrade_v1_1_v1_5 with_collection_lock do temp_dir = Pathname . new ( Dir . mktmpdir ( TEMP_PREFIX , @temp_root ) ) @directory . children ( true ) . each do | boxdir | next if ! boxdir . directory? box_name = boxdir . basename . to_s if v1_box? ( boxdir ) upgrade_dir = v1_upgrade ( boxdir ) FileUtils . mv ( upgrade_dir , boxdir . join ( "virtualbox" ) ) end new_box_dir = temp_dir . join ( dir_name ( box_name ) , "0" ) new_box_dir . mkpath boxdir . children ( true ) . each do | providerdir | FileUtils . cp_r ( providerdir , new_box_dir . join ( providerdir . basename ) ) end end @directory . rmtree FileUtils . mv ( temp_dir . to_s , @directory . to_s ) end end | This upgrades a v1 . 1 - v1 . 4 box directory structure up to a v1 . 5 directory structure . This will raise exceptions if it fails in any way . |
399 | def clean ( name ) return false if exists? ( name ) path = File . join ( directory , dir_name ( name ) ) FileUtils . rm_rf ( path ) end | Cleans the directory for a box by removing the folders that are empty . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.