idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
900
# Friendly description that shows the full command that will be executed.
def default_description
  parts = ['Run `scss-lint']
  parts << "--config #{config}" if config
  parts << args.to_s if args
  parts << files.join(' ') if files.any?
  parts << '[files...]`'
  parts.join(' ')
end
Friendly description that shows the full command that will be executed .
901
# Create a CLI that outputs to the specified logger.
#
# Parses the command-line arguments and acts on the resulting options;
# any runtime failure is routed to the exception handler.
def run(args)
  options = SCSSLint::Options.new.parse(args)
  act_on_options(options)
rescue StandardError => ex
  handle_runtime_exception(ex, options)
end
Create a CLI that outputs to the specified logger .
902
# Return the path of the configuration file that should be loaded.
#
# Precedence: explicit --config option, then a config file in the
# current directory, then the user-level config file. Returns nil when
# none of these exist.
def relevant_configuration_file(options)
  return options[:config_file] if options[:config_file]
  return Config::FILE_NAME if File.exist?(Config::FILE_NAME)

  Config.user_file if File.exist?(Config.user_file)
end
Return the path of the configuration file that should be loaded .
903
# The Sass parser sometimes doesn't assign line numbers in cases where
# it should. This is a helper to easily correct that.
#
# Returns the node itself so the call can be chained.
def add_line_number(node)
  if node.is_a?(::Sass::Script::Tree::Node)
    node.line ||= line
  end
  node
end
The Sass parser sometimes doesn't assign line numbers in cases where it should. This is a helper to easily correct that.
904
# Run this linter against a parsed document with the given
# configuration, collecting the lints that were found into @lints.
def run(engine, config)
  @lints = []
  @engine = engine
  @config = config
  @comment_processor = ControlCommentProcessor.new(self)

  visit(engine.tree)

  # Control comments may disable some of the collected lints.
  @lints = @comment_processor.filter_lints(@lints)
end
Create a linter . Run this linter against a parsed document with the given configuration returning the lints that were found .
905
# Helper for creating a lint from a parse tree node, a line number, or a
# location object.
def add_lint(node_or_line_or_location, message)
  location = extract_location(node_or_line_or_location)
  severity = @config.fetch('severity', :warning).to_sym
  @lints << Lint.new(self, engine.filename, location, message, severity)
end
Helper for creating lint from a parse tree node
906
# Extracts the original source code given a range.
#
# Lines and offsets in the range are 1-based while engine.lines is
# 0-based, hence the -1 adjustments below.
def source_from_range(source_range)
  current_line = source_range.start_pos.line - 1
  last_line = source_range.end_pos.line - 1
  start_pos = source_range.start_pos.offset - 1

  # First line: slice from the start offset to the end offset when the
  # range is confined to a single line, otherwise to end-of-line.
  source = if current_line == last_line
    engine.lines[current_line][start_pos..(source_range.end_pos.offset - 1)]
  else
    engine.lines[current_line][start_pos..-1]
  end

  current_line += 1

  # Append the full text of every intermediate line.
  while current_line < last_line
    source += engine.lines[current_line].to_s
    current_line += 1
  end

  # For multi-line ranges, append the final line up to the end offset.
  # `|| ''` guards against a range extending past the last line.
  if source_range.start_pos.line != source_range.end_pos.line
    source += ((engine.lines[current_line] || '')[0...source_range.end_pos.offset]).to_s
  end

  source
end
Extracts the original source code given a range .
907
# Returns whether a given node spans only a single line.
#
# Returns nil (falsy) for multi-line nodes; for single-line nodes,
# returns false when the line opens a block (ends with '{').
def node_on_single_line?(node)
  range = node.source_range
  return if range.start_pos.line != range.end_pos.line

  engine.lines[node.line - 1].strip[-1] != '{'
end
Returns whether a given node spans only a single line .
908
# Modified so we can also visit selectors in linters.
def visit(node)
  visit_selector(node.parsed_rules) if node.is_a?(Sass::Tree::RuleNode) && node.parsed_rules

  # Control-comment hooks only run when the document contains any
  # control commands.
  @comment_processor.before_node_visit(node) if @engine.any_control_commands
  super
  @comment_processor.after_node_visit(node) if @engine.any_control_commands
end
Modified so we can also visit selectors in linters
909
# Redefined so we can set the node_parent of each node before visiting it.
def visit_children(parent)
  parent.children.each do |node|
    node.node_parent = parent
    visit(node)
  end
end
Redefine so we can set the node_parent of each node
910
# Since keyword arguments are not guaranteed to be in order, use the
# source range to order arguments so we check them in the order they
# were declared. Accepts nested arrays and ignores nils.
def sort_args_by_position(*args)
  args.flatten.compact.sort_by do |argument|
    end_pos = argument.source_range.end_pos
    [end_pos.line, end_pos.offset]
  end
end
Since keyword arguments are not guaranteed to be in order, use the source range to order arguments so we check them in the order they were declared.
911
# Find the comma following this argument.
#
# Starting from the argument's end position, probe one character to the
# right, then one to the left, growing the distance each iteration. If
# both directions run off the line, restart at column 1 of the next
# line.
#
# @return [Sass::Source::Position] position of the comma
def find_comma_position(arg)
  offset = 0
  pos = arg.source_range.end_pos
  if character_at(pos, offset) != ','
    loop do
      offset += 1
      break if (right_char = character_at(pos, offset)) == ','

      # Negate the offset to probe the same distance to the left.
      offset = -offset
      break if (left_char = character_at(pos, offset)) == ','

      offset = -offset

      # Keep widening until both sides have run off the line.
      next unless right_char.nil? && left_char.nil?

      # Both sides exhausted: continue the search on the next line.
      offset = 0
      pos = Sass::Source::Position.new(pos.line + 1, 1)
      break if character_at(pos, offset) == ','
    end
  end
  Sass::Source::Position.new(pos.line, pos.offset + offset)
end
Find the comma following this argument .
912
# Runs a single linter against a file. Kept as its own method for
# stubbing in tests.
def run_linter(linter, engine, file_path)
  return if @config.excluded_file_for_linter?(file_path, linter)

  found = linter.run(engine, @config.linter_options(linter))
  @lints += found
end
For stubbing in tests .
913
# Returns a key identifying the bucket this property and value
# correspond to for purposes of uniqueness.
#
# NOTE(review): the regex literal below appears to have been corrupted
# during extraction — as written it matches a single space, yet the
# block calls `.first` on each match, which implies the original
# pattern had a capture group (likely matching vendor-prefix keywords).
# Recover the original pattern from upstream scss-lint before relying
# on this.
def property_key(prop)
  prop_key = prop.name.join
  prop_value = value_as_string(prop.value.first)
  prop_value.to_s.scan(/ /) do |vendor_keyword|
    prop_key << vendor_keyword.first
  end
  prop_key
end
Returns a key identifying the bucket this property and value correspond to for purposes of uniqueness .
914
# Check if, starting from the end of a string and moving backwards
# towards the beginning, we find a newline before any non-whitespace
# character.
#
# The scan starts at offset -2 so the string's final character
# (typically its trailing newline) is skipped.
#
# @param string [String]
# @return [Boolean]
def newline_before_nonwhitespace(string)
  offset = -2
  while /\S/.match(string[offset]).nil?
    char = string[offset]
    # Fix: stop once we run off the front of the string. Previously a
    # string containing only whitespace looped forever, because
    # string[offset] returned nil for every further offset and nil is
    # neither "\n" nor a \S match.
    return false if char.nil?
    return true if char == "\n"

    offset -= 1
  end
  false
end
Check if, starting from the end of a string and moving backwards towards the beginning, we find a newline before any non-whitespace characters.
915
# Checks if an individual sequence is split over multiple lines and
# reports a lint when it is.
def check_multiline_sequence(node, sequence, index)
  members = sequence.members
  return unless members.size > 1
  return unless members[2..-1].any? { |m| m == "\n" }

  add_lint(node.line + index, MESSAGE)
end
Checks if an individual sequence is split over multiple lines
916
# Checks if a property value's units are allowed; reports a lint when
# they are not.
def check_units(node, property, units)
  allowed = allowed_units_for_property(property)
  return if allowed.include?(units)

  add_lint(node,
           "#{units} units not allowed on `#{property}`; must be one of " \
           "(#{allowed.to_a.sort.join(', ')})")
end
Checks if a property value's units are allowed.
917
# Find the first child that is out of place and report it.
def check_children_order(sorted_children, children)
  sorted_children.each_with_index do |sorted_item, idx|
    next if sorted_item == children[idx]

    add_lint(sorted_item.first.line,
             "Expected item on line #{sorted_item.first.line} to appear " \
             "before line #{children[idx].first.line}. #{MESSAGE}")
    # Only the first offender is reported.
    break
  end
end
Find the child that is out of place
918
# Return the nth ancestor of a node, where 1 is the parent, 2 the
# grandparent, etc. Returns nil if the chain runs out first.
def node_ancestor(node, levels)
  ancestor = node
  remaining = levels
  while remaining > 0
    ancestor = ancestor.node_parent
    return unless ancestor

    remaining -= 1
  end
  ancestor
end
Return nth - ancestor of a node where 1 is the parent 2 is grandparent etc .
919
# Return whether to ignore a property in the sort order.
#
# Properties whose names contain non-string segments (interpolation)
# are always ignored; otherwise 'ignore_unspecified' skips properties
# absent from the preferred order.
def ignore_property?(prop_node)
  return true if prop_node.name.any? { |segment| !segment.is_a?(String) }

  config['ignore_unspecified'] &&
    @preferred_order &&
    !@preferred_order.include?(prop_node.name.join)
end
Return whether to ignore a property in the sort order .
920
# Matches the block or conditions hash against the given subject.
def matches_conditions?(action, subject, attribute = nil, *extra_args)
  if @match_all
    call_block_with_all(action, subject, extra_args)
  elsif @block
    matches_block_conditions(subject, attribute, *extra_args)
  elsif !conditions_empty?
    matches_non_block_conditions(subject)
  else
    # No block and no conditions: the rule matches unconditionally.
    true
  end
end
Matches the block or conditions hash
921
# Check if the user has permission to perform a given action on an
# object. Returns the matching rule's base behavior, or false when no
# rule matches.
def can?(action, subject, attribute = nil, *extra_args)
  matched_rule = extract_subjects(subject).lazy.map do |a_subject|
    relevant_rules_for_match(action, a_subject).detect do |rule|
      rule.matches_conditions?(action, a_subject, attribute, *extra_args) &&
        rule.matches_attributes?(attribute)
    end
  end.reject(&:nil?).first

  matched_rule ? matched_rule.base_behavior : false
end
Check if the user has permission to perform a given action on an object .
922
# Defines which abilities are allowed. The first argument is the action
# you're setting the permission for; the second is the class of object
# you're setting it on.
def can(action = nil, subject = nil, *attributes_and_conditions, &block)
  rule = Rule.new(true, action, subject, *attributes_and_conditions, &block)
  add_rule(rule)
end
Defines which abilities are allowed using two arguments. The first one is the action you're setting the permission for; the second one is the class of object you're setting it on.
923
# Defines an ability which cannot be done. Accepts the same arguments
# as `can`.
def cannot(action = nil, subject = nil, *attributes_and_conditions, &block)
  rule = Rule.new(false, action, subject, *attributes_and_conditions, &block)
  add_rule(rule)
end
Defines an ability which cannot be done . Accepts the same arguments as can .
924
# Users shouldn't specify targets with names of real actions, so raise
# when the target collides with an existing action name.
def validate_target(target)
  return unless aliased_actions.values.flatten.include? target

  raise Error,
        "You can't specify target (#{target}) as alias because it is real action name"
end
Users shouldn't specify targets with names of real actions, or it will cause a segfault.
925
# Adds a new Archive to a Backup Model.
#
# Streams a tar of the configured paths — optionally piped through the
# model's compressor — into the model's tmp "archives" directory.
#
# @raise [Error] if the pipeline does not complete successfully
def perform!
  Logger.info "Creating Archive '#{name}'..."

  path = File.join(Config.tmp_path, @model.trigger, "archives")
  FileUtils.mkdir_p(path)

  pipeline = Pipeline.new

  with_files_from(paths_to_package) do |files_from|
    # tar writes to stdout ("-f -"); -P preserves absolute paths.
    pipeline.add(
      "#{tar_command} #{tar_options} -cPf -#{tar_root} " "#{paths_to_exclude} #{files_from}",
      tar_success_codes
    )

    extension = "tar"
    if @model.compressor
      # Each compressor stage appends its command and file extension.
      @model.compressor.compress_with do |command, ext|
        pipeline << command
        extension << ext
      end
    end

    # Final stage: cat the stream into the destination file.
    pipeline << "#{utility(:cat)} > " "'#{File.join(path, "#{name}.#{extension}")}'"

    pipeline.run
  end

  if pipeline.success?
    Logger.info "Archive '#{name}' Complete!"
  else
    raise Error, "Failed to Create Archive '#{name}'\n" + pipeline.error_messages
  end
end
Adds a new Archive to a Backup Model .
926
# Adds a Database. Multiple Databases may be added to the model.
def database(name, database_id = nil, &block)
  klass = get_class_from_scope(Database, name)
  @databases << klass.new(self, database_id, &block)
end
Adds an Database . Multiple Databases may be added to the model .
927
# Adds a Storage. Multiple Storages may be added to the model.
def store_with(name, storage_id = nil, &block)
  klass = get_class_from_scope(Storage, name)
  @storages << klass.new(self, storage_id, &block)
end
Adds an Storage . Multiple Storages may be added to the model .
928
# Adds a Syncer. Multiple Syncers may be added to the model.
# Note: unlike databases/storages, syncers are not passed the model.
def sync_with(name, syncer_id = nil, &block)
  klass = get_class_from_scope(Syncer, name)
  @syncers << klass.new(syncer_id, &block)
end
Adds an Syncer . Multiple Syncers may be added to the model .
929
# Adds a Splitter to split the final backup package into multiple files.
#
# NOTE(review): the heredoc error message below appears to have been
# lost during extraction (it is empty as written); restore the original
# wording from upstream Backup if needed.
def split_into_chunks_of(chunk_size, suffix_length = 3)
  unless chunk_size.is_a?(Integer) && suffix_length.is_a?(Integer)
    raise Error, <<-EOS
    EOS
  end

  @splitter = Splitter.new(self, chunk_size, suffix_length)
end
Adds a Splitter to split the final backup package into multiple files .
930
# Performs the backup process.
#
# Runs before/after hooks around all procedures and syncers, recording
# start/finish times, interruption, and any exception so the final
# status can be logged from the ensure block.
def perform!
  @started_at = Time.now.utc
  @time = package.time = started_at.strftime("%Y.%m.%d.%H.%M.%S")
  log!(:started)
  before_hook

  # A procedure is either a lambda to call or a collection of
  # performable objects (databases, archives).
  procedures.each do |procedure|
    procedure.is_a?(Proc) ? procedure.call : procedure.each(&:perform!)
  end

  syncers.each(&:perform!)
rescue Interrupt
  @interrupted = true
  raise
rescue Exception => err
  # Intentionally broad rescue: any failure (including fatal errors) is
  # stored so the ensure block can report it via the exit status.
  @exception = err
ensure
  unless @interrupted
    set_exit_status
    @finished_at = Time.now.utc
    log!(:finished)
    after_hook
  end
end
Performs the backup process
931
# Returns an array of procedures that will be performed if any Archives
# or Databases are configured for the model.
def procedures
  return [] unless databases.any? || archives.any?

  setup = [-> { prepare! }]
  teardown = [-> { package! }, -> { store! }, -> { clean! }]
  setup + [databases, archives] + teardown
end
Returns an array of procedures that will be performed if any Archives or Databases are configured for the model .
932
# Attempts to use all configured Storages, even if some of them result
# in exceptions. Returns true, or raises the first encountered
# exception after logging the others.
def store!
  results = storages.map do |storage|
    begin
      storage.perform!
    rescue => ex
      ex
    end
  end

  failures = results.select { |result| result.is_a? Exception }
  return true if failures.empty?

  first_exception = failures.shift
  failures.each do |exception|
    Logger.error exception.to_s
    Logger.error exception.backtrace.join('\n')
  end
  raise first_exception
end
Attempts to use all configured Storages even if some of them result in exceptions . Returns true or raises first encountered exception .
933
# Logs messages when the model starts and finishes.
#
# @param action [Symbol] :started or :finished
def log!(action)
  case action
  when :started
    Logger.info "Performing Backup for '#{label} (#{trigger})'!\n" "[ backup #{VERSION} : #{RUBY_DESCRIPTION} ]"
  when :finished
    # exit_status 2 maps to Error, anything higher to FatalError;
    # exit_status 1 means warnings; 0 is full success.
    if exit_status > 1
      ex = exit_status == 2 ? Error : FatalError
      err = ex.wrap(exception, "Backup for #{label} (#{trigger}) Failed!")
      Logger.error err
      Logger.error "\nBacktrace:\n\s\s" + err.backtrace.join("\n\s\s") + "\n\n"
      Cleaner.warnings(self)
    else
      msg = "Backup for '#{label} (#{trigger})' "
      if exit_status == 1
        msg << "Completed Successfully (with Warnings) in #{duration}"
        Logger.warn msg
      else
        msg << "Completed Successfully in #{duration}"
        Logger.info msg
      end
    end
  end
end
Logs messages when the model starts and finishes .
934
# Finds the resulting files from the packaging procedure and stores an
# array of the suffixes used on the package.
def after_packaging
  suffixes = chunk_suffixes
  first_suffix = "a" * suffix_length

  if suffixes == [first_suffix]
    # Only one chunk was produced: drop the suffix entirely.
    src = File.join(Config.tmp_path, "#{package.basename}-#{first_suffix}")
    dst = File.join(Config.tmp_path, package.basename)
    FileUtils.mv(src, dst)
  else
    package.chunk_suffixes = suffixes
  end
end
Finds the resulting files from the packaging procedure and stores an Array of suffixes used in
935
# Includes required submodules into the model class (which usually is
# called User).
def include_required_submodules!
  class_eval do
    @sorcery_config.submodules = ::Sorcery::Controller::Config.submodules
    @sorcery_config.submodules.each do |mod|
      begin
        const_name = mod.to_s.split('_').map(&:capitalize).join
        include Submodules.const_get(const_name)
      rescue NameError
        # Some submodules are controller-only and have no model-side
        # constant; skip those silently.
      end
    end
  end
end
includes required submodules into the model class which usually is called User .
936
# Adds the virtual password accessor and ORM callbacks.
def init_orm_hooks!
  # Both callbacks only fire when a (virtual) password is present.
  sorcery_adapter.define_callback :before, :validation, :encrypt_password,
                                  if: proc { |rec| rec.send(sorcery_config.password_attribute_name).present? }
  sorcery_adapter.define_callback :after, :save, :clear_virtual_password,
                                  if: proc { |rec| rec.send(sorcery_config.password_attribute_name).present? }
  attr_accessor sorcery_config.password_attribute_name
end
add virtual password accessor and ORM callbacks .
937
# Adds the `explain` option to the search request. Called with no
# argument it enables explanation; an explicit value is passed through.
def explain(value = nil)
  explain_value = value.nil? ? true : value
  chain { criteria.update_request_options explain: explain_value }
end
Comparison with another query or collection. If the other operand is a collection, the search request is executed and its result is used for the comparison.
938
# Sets the elasticsearch `size` search request param. The default value
# is set by elasticsearch itself and is 10.
def limit(value = nil, &block)
  chain do
    # The block form wins; Integer() is only evaluated when no block is
    # given, so a nil value with a block does not raise.
    criteria.update_request_options size: block || Integer(value)
  end
end
Sets elasticsearch size search request param Default value is set in the elasticsearch and is 10 .
939
# Sets the elasticsearch `from` search request param.
def offset(value = nil, &block)
  chain do
    # The block form wins; Integer() is only evaluated when no block is
    # given.
    criteria.update_request_options from: block || Integer(value)
  end
end
Sets elasticsearch from search request param
940
# Adds a facets section to the search request; all chained facets are
# merged. With no argument, returns the response's facets section.
def facets(params = nil)
  raise RemovedFeature, 'removed in elasticsearch 2.0' if Runtime.version >= '2.0'

  return chain { criteria.update_facets params } if params

  _response['facets'] || {}
end
Adds facets section to the search request . All the chained facets a merged and added to the search request
941
# Adds a script function to score the search request. All scores are
# added to the request and combined according to boost_mode and
# score_mode.
def script_score(script, options = {})
  scoring_clause = { script_score: { script: script }.merge(options) }
  chain { criteria.update_scores scoring_clause }
end
Adds a script function to score the search request. All scores are added to the search request and combined according to boost_mode and score_mode.
942
# Adds a boost factor to the search request. All scores are added to
# the request and combined according to boost_mode and score_mode.
def boost_factor(factor, options = {})
  scoring_clause = options.merge(boost_factor: factor.to_i)
  chain { criteria.update_scores scoring_clause }
end
Adds a boost factor to the search request. All scores are added to the search request and combined according to boost_mode and score_mode.
943
# Adds a random score to the search request. All scores are added to
# the request and combined according to boost_mode and score_mode.
# The seed defaults to the current time.
def random_score(seed = Time.now, options = {})
  scoring_clause = options.merge(random_score: { seed: seed.to_i })
  chain { criteria.update_scores scoring_clause }
end
Adds a random score to the search request. All scores are added to the search request and combined according to boost_mode and score_mode.
944
# Adds a field-value scoring to the search. All scores are added to the
# request and combined according to boost_mode and score_mode.
def field_value_factor(settings, options = {})
  scoring_clause = options.merge(field_value_factor: settings)
  chain { criteria.update_scores scoring_clause }
end
Adds a field-value scoring to the search. All scores are added to the search request and combined according to boost_mode and score_mode.
945
# Adds a decay scoring to the search. All scores are added to the
# request and combined according to boost_mode and score_mode.
def decay(function, field, options = {})
  # Pull the decay-specific keys out of options, dropping nils.
  field_options = options.extract!(:origin, :scale, :offset, :decay)
                         .delete_if { |_, value| value.nil? }
  scoring_clause = options.merge(function => { field => field_options })
  chain { criteria.update_scores scoring_clause }
end
Adds a decay scoring to the search. All scores are added to the search request and combined according to boost_mode and score_mode.
946
# Sets the elasticsearch aggregations search request param.
#
# Accepts a Symbol naming a locally-defined aggregation, a String
# naming a fully-qualified aggregation, or a raw params Hash. With no
# argument, returns the 'aggregations' section of the response.
#
# NOTE(review): the regex below appears to have lost its quantifiers
# during extraction (as written it cannot match any string); it
# originally matched a dotted fully-qualified name. Restore from
# upstream Chewy before relying on the String branch.
def aggregations(params = nil)
  @_named_aggs ||= _build_named_aggs
  @_fully_qualified_named_aggs ||= _build_fqn_aggs
  if params
    params = { params => @_named_aggs[params] } if params.is_a?(Symbol)
    params = { params => _get_fully_qualified_named_agg(params) } if params.is_a?(String) && params =~ / \A \S \S \. \S \z /
    chain { criteria.update_aggregations params }
  else
    _response['aggregations'] || {}
  end
end
Sets elasticsearch aggregations search request param
947
# Builds the map of named aggregations. In this simplest of
# implementations, each named aggregation must be uniquely named; later
# definitions with the same name overwrite earlier ones.
def _build_named_aggs
  {}.tap do |named_aggs|
    @_indexes.each do |index|
      index.types.each do |type|
        type._agg_defs.each do |agg_name, definition|
          named_aggs[agg_name] = definition.call
        end
      end
    end
  end
end
In this simplest of implementations each named aggregation must be uniquely named
948
# Deletes all documents matching a query.
#
# On Elasticsearch >= 2.0 delete-by-query lives in a plugin, so its
# presence is verified and the request is issued manually against the
# /_query endpoint; older versions use the client API directly.
#
# @raise [PluginMissing] when the delete-by-query plugin is absent
def delete_all
  if Runtime.version >= '2.0'
    plugins = Chewy.client.nodes.info(plugins: true)['nodes'].values.map { |item| item['plugins'] }.flatten
    raise PluginMissing, 'install delete-by-query plugin' unless plugins.find { |item| item['name'] == 'delete-by-query' }
  end

  request = chain { criteria.update_options simple: true }.send(:_request)

  # Instrument for subscribers; single index/type values are unwrapped.
  ActiveSupport::Notifications.instrument 'delete_query.chewy',
    request: request, indexes: _indexes, types: _types,
    index: _indexes.one? ? _indexes.first : _indexes,
    type: _types.one? ? _types.first : _types do
    if Runtime.version >= '2.0'
      path = Elasticsearch::API::Utils.__pathify(
        Elasticsearch::API::Utils.__listify(request[:index]),
        Elasticsearch::API::Utils.__listify(request[:type]),
        '/_query'
      )
      Chewy.client.perform_request(Elasticsearch::API::HTTP_DELETE, path, {}, request[:body]).body
    else
      Chewy.client.delete_by_query(request)
    end
  end
end
Deletes all documents matching a query .
949
# Find all documents matching the given ids.
#
# Returns a single document when called with one scalar id, otherwise
# an array. Raises when nothing matched.
def find(*ids)
  flat_ids = ids.flatten
  results = chain { criteria.update_options simple: true }.filter { _id == flat_ids }.to_a

  raise Chewy::DocumentNotFound, "Could not find documents for ids #{flat_ids}" if results.empty?

  single = ids.one? && !ids.first.is_a?(Array)
  single ? results.first : results
end
Find all documents matching a query .
950
# A helper to build a bolt command used in acceptance testing.
#
# NOTE(review): both platform-matching regexes below were lost during
# extraction (each matches a single space as written); they originally
# selected specific platforms (the first branch is clearly Windows,
# given the powershell execution). Restore from upstream before use.
def bolt_command_on(host, command, flags = {}, opts = {})
  bolt_command = command.dup
  flags.each { |k, v| bolt_command << " #{k} #{v}" }
  case host['platform']
  when / /
    execute_powershell_script_on(host, bolt_command, opts)
  when / /
    # Load the user environment so bolt and a UTF-8 locale are available.
    env = 'source /etc/profile ~/.bash_profile ~/.bash_login ~/.profile && env LANG=en_US.UTF-8'
    on(host, env + ' ' + bolt_command)
  else
    on(host, bolt_command, opts)
  end
end
A helper to build a bolt command used in acceptance testing
951
# Count the number of top-level statements in the AST.
def count_statements(ast)
  if ast.is_a?(Puppet::Pops::Model::Program)
    # Unwrap the program and count its body.
    count_statements(ast.body)
  elsif ast.is_a?(Puppet::Pops::Model::BlockExpression)
    ast.statements.count
  else
    # Any other node is a single statement.
    1
  end
end
Count the number of top - level statements in the AST .
952
# Create a cache dir if necessary and update its last write time.
# Returns the dir path. Holds the cache-dir read lock while touching.
def create_cache_dir(sha)
  dir = File.join(@cache_dir, sha)
  @cache_dir_mutex.with_read_lock do
    # 0750: owner rwx, group rx; touch refreshes the write time used
    # for cache expiry.
    FileUtils.mkdir_p(dir, mode: 0o750)
    FileUtils.touch(dir)
  end
  dir
end
Create a cache dir if necessary and update its last write time. Returns the dir. Acquires the cache-dir lock while doing so.
953
# If the file doesn't exist or is invalid, redownload it. This
# downloads, validates and moves into place.
def update_file(file_data)
  sha = file_data['sha256']
  file_path = File.join(create_cache_dir(sha), File.basename(file_data['filename']))

  if check_file(file_path, sha)
    @logger.debug("Using prexisting task file: #{file_path}")
    return file_path
  end

  @logger.debug("Queueing download for: #{file_path}")
  serial_execute { download_file(file_path, sha, file_data['uri']) }
end
If the file doesn't exist or is invalid, redownload it. This downloads, validates and moves into place.
954
# Runs the given job block, catching errors and turning the result into
# a ResultSet. On failure, a single Result built from the exception is
# used (attributed to the first target).
def as_resultset(targets)
  results =
    begin
      yield
    rescue StandardError => e
      @logger.warn(e)
      Array(Bolt::Result.from_exception(targets[0], e))
    end
  Bolt::ResultSet.new(results)
end
This handles running the job catching errors and turning the result into a result set
955
# Starts executing the given block on a list of nodes in parallel, one
# thread per batch.
#
# Targets are grouped by transport and split into batches; each batch
# is posted to the thread pool. Returns one Concurrent::Promise per
# target so callers can collect results as they resolve.
def queue_execute(targets)
  targets.group_by(&:transport).flat_map do |protocol, protocol_targets|
    transport = transport(protocol)
    report_transport(transport, protocol_targets.count)
    transport.batches(protocol_targets).flat_map do |batch|
      batch_promises = Array(batch).each_with_object({}) do |target, h|
        # :immediate executor: the promise resolves on the thread that
        # sets it rather than being re-queued.
        h[target] = Concurrent::Promise.new(executor: :immediate)
      end
      @pool.post(batch_promises) do |result_promises|
        begin
          results = yield transport, batch
          Array(results).each do |result|
            result_promises[result.target].set(result)
          end
        rescue StandardError, NotImplementedError => e
          # Fail every target in the batch with the same exception.
          result_promises.each do |target, promise|
            @logger.warn(e)
            promise.set(Bolt::Result.from_exception(target, e))
          end
        ensure
          # Guarantee every promise resolves, even when the transport
          # returned no result for some target.
          result_promises.each do |target, promise|
            next if promise.fulfilled?
            error = $ERROR_INFO || Bolt::Error.new("No result was returned for #{target.uri}",
                                                   "puppetlabs.bolt/missing-result-error")
            promise.set(Bolt::Result.from_exception(target, error))
          end
        end
      end
      batch_promises.values
    end
  end
end
Starts executing the given block on a list of nodes in parallel one thread per batch .
956
# Parses a snippet of Puppet manifest code, wrapping parse failures in
# a Bolt-specific error.
def parse_manifest(code, filename)
  parser = Puppet::Pops::Parser::EvaluatingParser.new
  parser.parse_string(code, filename)
rescue Puppet::Error => e
  raise Bolt::PAL::PALError, "Failed to parse manifest: #{e}"
end
Parses a snippet of Puppet manifest code and returns the AST represented in JSON .
957
# Converts a plan signature object into the format used by the
# outputter. Must be called from within the bolt compiler to pick up
# type aliases used in the plan signature.
def plan_hash(plan_name, plan)
  elements = plan.params_type.elements || []
  parameters = elements.each_with_object({}) do |param, acc|
    value_type = param.value_type
    # Aliases keep their name; other types are stringified.
    type = value_type.is_a?(Puppet::Pops::Types::PTypeAliasType) ? value_type.name : value_type.to_s
    acc[param.name] = { 'type' => type }
    # Optional parameters get an explicit nil default for display.
    acc[param.name]['default_value'] = nil if param.key_type.is_a?(Puppet::Pops::Types::POptionalType)
  end
  { 'name' => plan_name, 'parameters' => parameters }
end
This converts a plan signature object into a format used by the outputter . Must be called from within bolt compiler to pickup type aliases used in the plan signature .
958
# Returns a mapping of all modules available to the Bolt compiler,
# keyed by modulepath entry, tagging Bolt's internal paths with a
# group label.
def list_modules
  internal_module_groups = {
    BOLTLIB_PATH => 'Plan Language Modules',
    MODULES_PATH => 'Packaged Modules'
  }

  in_bolt_compiler do
    Puppet.lookup(:current_environment).modules_by_path.map do |path, modules|
      module_group = internal_module_groups[path]
      values = modules.map do |mod|
        info = { name: mod.forge_name || mod.name, version: mod.version }
        info[:internal_module_group] = module_group if module_group
        info
      end
      [path, values]
    end.to_h
  end
end
Returns a mapping of all modules available to the Bolt compiler
959
# Applies transport configuration to this target and returns self.
def update_conf(conf)
  @protocol = conf[:transport]

  # Per-transport settings, falling back to an empty hash.
  t_conf = conf[:transports][transport.to_sym] || {}
  @host = t_conf['host']
  @port = t_conf['port']
  @user = t_conf['user']
  @password = t_conf['password']

  # Existing target options take precedence over transport defaults.
  @options = t_conf.merge(@options)

  self
end
URI can be passed as nil.
960
# Lazily builds a default Bolt config rooted at the current directory.
# Override in your tests.
def config
  @config ||= Bolt::Config.new(Bolt::Boltdir.new('.'), {}).tap do |conf|
    conf.modulepath = [modulepath].flatten
  end
end
Override in your tests
961
# Convert an r10k log level to a bolt log level. These correspond
# 1-to-1 except that r10k has debug, debug1 and debug2. The log event
# has the log level as an integer that we need to look up.
#
# NOTE(review): the regex below lost its body during extraction (as
# written it matches a lone space); it originally collapsed the
# debug/debug1/debug2 level names to :debug. Restore from upstream
# before use.
def to_bolt_level(level_num)
  level_str = Log4r::LNAMES[level_num]&.downcase || 'debug'
  if level_str =~ / /
    :debug
  else
    level_str.to_sym
  end
end
Convert an r10k log level to a bolt log level . These correspond 1 - to - 1 except that r10k has debug debug1 and debug2 . The log event has the log level as an integer that we need to look up .
962
# Applies inventory-sourced vars, facts, features and transport config
# to the given target, then validates the resulting transport.
# Pass a target to get_targets for a public version of this.
# Should this reconfigure configured targets?
#
# @return [Bolt::Target] the same target, updated in place
# @raise [Bolt::UnknownTransportError] for unrecognized transports
def update_target(target)
  data = @groups.data_for(target.name)
  data ||= {}

  unless data['config']
    @logger.debug("Did not find config for #{target.name} in inventory")
    data['config'] = {}
  end

  data = self.class.localhost_defaults(data) if target.name == 'localhost'

  # Vars/facts/features are only applied once per target, guarded by
  # the @target_* caches.
  set_vars_from_hash(target.name, data['vars']) unless @target_vars[target.name]
  set_facts(target.name, data['facts']) unless @target_facts[target.name]
  data['features']&.each { |feature| set_feature(target, feature) } unless @target_features[target.name]

  # Merge the inventory config over a copy of the global config so the
  # global config object is never mutated.
  conf = @config.deep_clone
  conf.update_from_inventory(data['config'])
  conf.validate

  target.update_conf(conf.transport_conf)

  unless target.transport.nil? || Bolt::TRANSPORTS.include?(target.transport.to_sym)
    raise Bolt::UnknownTransportError.new(target.transport, target.uri)
  end

  target
end
Pass a target to get_targets for a public version of this Should this reconfigure configured targets?
963
# Prints all information associated to the breakpoint.
def inspect
  fields = %w[id pos source expr hit_condition hit_count hit_value enabled?]
  pairs = fields.map { |field| "#{field}: #{send(field)}" }
  "#<Byebug::Breakpoint #{pairs.join(', ')}>"
end
Prints all information associated to the breakpoint
964
# Main byebug's REPL: read commands until told to proceed or the
# interface runs out of input.
def repl
  loop do
    break if @proceed

    cmd = interface.read_command(prompt)
    return if cmd.nil?
    next if cmd == ""

    run_cmd(cmd)
  end
end
Main byebug's REPL.
965
# Runs the commands that execute automatically at the given run level.
def run_auto_cmds(run_level)
  safely do
    auto_cmds_for(run_level).each do |cmd|
      cmd.new(self).execute
    end
  end
end
Run permanent commands .
966
# Executes the received input: as a debugger command when one matches,
# otherwise evaluated as Ruby and inspected.
def run_cmd(input)
  safely do
    matched = command_list.match(input)
    return matched.new(self, input).execute if matched

    puts safe_inspect(multiple_thread_eval(input))
  end
end
Executes the received input
967
# Restores history from disk, oldest entry last so pushes preserve
# chronological order.
def restore
  histfile = Setting[:histfile]
  return unless File.exist?(histfile)

  File.readlines(histfile).reverse_each { |line| push(line.chomp) }
end
Restores history from disk .
968
# Saves history to disk, capped at the configured histsize, then clears
# the in-memory history.
def save
  n_cmds = [Setting[:histsize], size].min
  File.open(Setting[:histfile], "w") do |file|
    n_cmds.times { file.puts(pop) }
  end
  clear
end
Saves history to disk .
969
# Prints the requested number of history entries (or the default
# amount when n_cmds is nil).
def to_s(n_cmds)
  show_size = n_cmds ? specific_max_size(n_cmds) : default_max_size
  commands = buffer.last(show_size)
  lines = last_ids(show_size).zip(commands).map do |id, cmd|
    format("%<position>5d %<command>s", position: id, command: cmd)
  end
  lines.join("\n") + "\n"
end
Prints the requested numbers of history entries .
970
# Whether a specific command should not be stored in history.
#
# NOTE(review): the regex below appears to have lost anchors or
# quantifiers during extraction; it originally matched blank or
# whitespace-only input. Verify against upstream byebug.
def ignore?(buf)
  return true if / \s / =~ buf
  return false if Readline::HISTORY.empty?

  # Also ignore exact duplicates of the most recent history entry.
  buffer[Readline::HISTORY.length - 1] == buf
end
Whether a specific command should not be stored in history .
971
# Delegates to subcommands, or prints help if no subcommand was
# specified.
def execute
  name = @match[1]
  return puts(help) unless name

  subcommand = subcommand_list.match(name)
  raise CommandNotFound.new(name, self.class) unless subcommand

  subcommand.new(processor, arguments).execute
end
Delegates to subcommands or prints help if no subcommand specified .
972
# Gets local variables for the frame as a name => value hash; an empty
# array when there is no binding.
def locals
  return [] unless _binding

  _binding.local_variables.each_with_object({}) do |name, hash|
    hash[name] = _binding.local_variable_get(name)
  end
end
Gets local variables for the frame .
973
# Builds a string containing all available args in the frame, in a
# verbose or non-verbose way according to the +callstyle+ setting.
def deco_args
  return "" if args.empty?

  decorated = args.map do |arg|
    prefix, default = prefix_and_default(arg[0])
    # Verbose style appends the class of the local's current value.
    kls = use_short_style?(arg) ? "" : "##{locals[arg[1]].class}"
    "#{prefix}#{arg[1] || default}#{kls}"
  end

  "(#{decorated.join(', ')})"
end
Builds a string containing all available args in the frame number in a verbose or non verbose way according to the value of the + callstyle + setting
974
# Context's stack size: the contiguous run of non-ignored frames after
# skipping ignored frames at the top.
def stack_size
  return 0 unless backtrace

  backtrace
    .drop_while { |frame| ignored_file?(frame.first.path) }
    .take_while { |frame| !ignored_file?(frame.first.path) }
    .size
end
Context s stack size
975
# Line range to be printed by `list`. With no input, an automatic range
# is chosen from the match direction.
def range(input)
  return auto_range(@match[1] || "+") unless input

  lower, upper = parse_range(input)
  raise("Invalid line range") unless valid_range?(lower, upper)

  [lower, upper]
end
Line range to be printed by list .
976
# Sets the line range to be printed by `list`: around the current frame
# line on "=" or first use, otherwise continuing from the last shown
# line in the given direction.
def auto_range(direction)
  last_shown = processor.prev_line
  return source_file_formatter.range_around(frame.line) if direction == "=" || last_shown.nil?

  source_file_formatter.range_from(move(last_shown, size, direction))
end
Set line range to be printed by list
977
# Show a range of lines in the current file, preceded by a
# "[min, max] in file" header.
def display_lines(min, max)
  puts "\n[#{min}, #{max}] in #{frame.file}"
  puts source_file_formatter.lines(min, max).join
end
Show a range of lines in the current file .
978
# Starts byebug to debug a program.
def run
  Byebug.mode = :standalone
  option_parser.order!($ARGV)
  return if non_script_option? || error_in_script?

  $PROGRAM_NAME = program
  Byebug.run_init_script if init_script

  loop do
    debug_program
    break if quit

    # The program finished but we were not told to quit: hand control
    # back to the user.
    ControlProcessor.new(nil, interface).process_commands
  end
end
Starts byebug to debug a program .
979
# Memoized parser for options passed from the command line.
def option_parser
  @option_parser ||= OptionParser.new(banner, 25) do |opts|
    opts.banner = banner
    OptionSetter.new(self, opts).setup
  end
end
Processes options passed from the command line .
980
# Reads a new line from the interface's input stream, parses it into
# commands, and (optionally) saves it to history. Returns the first
# queued command.
def read_input(prompt, save_hist = true)
  line = prepare_input(prompt)
  return unless line

  history.push(line) if save_hist

  command_queue.concat(split_commands(line))
  command_queue.shift
end
Reads a new line from the interface's input stream, parses it into commands and saves it to history.
981
# Raw content of the license file, including YAML front matter.
# Pseudo-licenses have no file and return nil.
def raw_content
  return if pseudo_license?
  raise Licensee::InvalidLicense, "'#{key}' is not a valid license key" unless File.exist?(path)

  @raw_content ||= File.read(path, encoding: 'utf-8')
end
Raw content of license file including YAML front matter
982
# Given another license or project file, calculates the similarity as a
# percentage of words in common (Sørensen–Dice-style: twice the overlap
# over the combined sizes).
def similarity(other)
  common = (wordset & other.wordset).size
  combined = wordset.size + other.wordset.size
  100.0 * (common * 2.0 / combined)
end
Given another license or project file calculates the similarity as a percentage of words in common
983
# Content with the title and version removed, memoized. Strips are
# applied in order; the result is whatever remains in _content.
def content_without_title_and_version
  @content_without_title_and_version ||= begin
    @_content = nil
    %i[html hrs comments markdown_headings title version].each { |op| strip(op) }
    _content
  end
end
Content with the title and version removed. The first line remaining should normally be the attribution line. Used to DRY up content_normalized, but we need the case-sensitive content with attribution first to detect attribution in LicenseFile.
984
# This is where users arrive after visiting the password reset
# confirmation link.
#
# Looks the user up by reset-password token; if valid and within the
# reset window, issues a new auth token, allows the password change,
# and redirects to the client with auth headers.
def edit
  @resource = resource_class.with_reset_password_token(resource_params[:reset_password_token])

  if @resource && @resource.reset_password_period_valid?
    client_id, token = @resource.create_token

    # Visiting the emailed link proves ownership of the address.
    @resource.skip_confirmation! if confirmable_enabled? && !@resource.confirmed_at
    @resource.allow_password_change = true if recoverable_enabled?

    @resource.save!
    yield @resource if block_given?

    redirect_header_options = { reset_password: true }
    redirect_headers = build_redirect_headers(token, client_id, redirect_header_options)
    redirect_to(@resource.build_auth_url(@redirect_url, redirect_headers))
  else
    render_edit_error
  end
end
this is where users arrive after visiting the password reset confirmation link
985
# This action is responsible for generating unlock tokens and sending
# the unlock-instruction emails.
def create
  return render_create_error_missing_email unless resource_params[:email]

  @email = get_case_insensitive_field_from_resource_params(:email)
  @resource = find_resource(:email, @email)

  return render_not_found_error unless @resource

  yield @resource if block_given?

  @resource.send_unlock_instructions(
    email: @email,
    provider: 'email',
    client_config: params[:config_name]
  )

  if @resource.errors.empty?
    render_create_success
  else
    render_create_error @resource.errors
  end
end
this action is responsible for generating unlock tokens and sending emails
986
# Intermediary route for successful omniauth authentication. omniauth
# does not support multiple models, so we must resort to this terrible
# hack: stash the auth payload in the session and redirect to the
# model-specific callback.
def redirect_callbacks
  devise_mapping = get_devise_mapping
  redirect_route = get_redirect_route(devise_mapping)

  # Persist omniauth data through the redirect; the 'extra' section is
  # dropped before storing in the session.
  session['dta.omniauth.auth'] = request.env['omniauth.auth'].except('extra')
  session['dta.omniauth.params'] = request.env['omniauth.params']

  redirect_to redirect_route
end
intermediary route for successful omniauth authentication . omniauth does not support multiple models so we must resort to this terrible hack .
987
# Returns the params for the omniauth flow, memoized in @_omniauth_params.
# The source differs by call site:
#   - redirect_callbacks: params live in the omniauth-specific
#     request.env['omniauth.params'] set by the middleware;
#   - omniauth_success: params were persisted through the redirect in the
#     'dta.omniauth.params' session key, which is consumed (deleted) here;
#   - failure case: params arrive as plain query params (added by the
#     OmniAuth monkey patch in engine.rb).
def omniauth_params
  unless defined?(@_omniauth_params)
    if request.env['omniauth.params'] && request.env['omniauth.params'].any?
      @_omniauth_params = request.env['omniauth.params']
    elsif session['dta.omniauth.params'] && session['dta.omniauth.params'].any?
      # Single-use: remove from the session as soon as it is read.
      @_omniauth_params ||= session.delete('dta.omniauth.params')
      @_omniauth_params
    elsif params['omniauth_window_type']
      @_omniauth_params = params.slice('omniauth_window_type', 'auth_origin_url', 'resource_class', 'origin')
    else
      @_omniauth_params = {}
    end
  end
  @_omniauth_params
end
The source of these params is determined differently depending on the action that calls this method. redirect_callbacks is called upon returning from successful omniauth authentication, and the target params live in an omniauth-specific request.env variable. That variable is then persisted through the redirect using our own dta.omniauth.params session var. The omniauth_success method accesses that session var and destroys it immediately after use. Finally, in the failure case, the omniauth params are added as query params by our monkey patch to OmniAuth in engine.rb.
988
# Copies the provider-supplied profile fields onto the user. Only keys of
# auth_hash['info'] that match one of the user's attribute names are
# assigned; everything else is discarded. Broken out as its own method so
# subclasses can extend or override the assignment.
def assign_provider_attrs(user, auth_hash)
  permitted = auth_hash['info'].slice(*user.attribute_names)
  user.assign_attributes(permitted)
end
break out provider attribute assignment for easy method extension
989
# Builds the subset of the omniauth params that devise permits for sign up,
# using the standard devise parameter sanitizer as the whitelist. Keys with
# no corresponding omniauth value are omitted entirely (not set to nil).
def whitelisted_params
  params_for_resource(:sign_up).each_with_object({}) do |key, permitted|
    value = omniauth_params[key.to_s]
    permitted[key] = value if value
  end
end
derive allowed params from the standard devise parameter sanitizer
990
# Resolves the pull request id for the current Bitbucket build. Bitbucket
# doesn't expose a PR id directly, so we ask the Bitbucket Cloud API for the
# pull request belonging to the current branch.
#
# @param env [Hash] the CI environment variables
# @return the pull request id reported by the API
# @raise [RuntimeError] when no pull request can be found for the branch
def bitbucket_pr_from_env(env)
  branch_name = env["BITBUCKET_BRANCH_NAME"]
  repo_slug = env["BITBUCKET_REPO_SLUG"]
  begin
    Danger::RequestSources::BitbucketCloudAPI.new(repo_slug, nil, branch_name, env).pull_request_id
  rescue StandardError
    # Rescue StandardError explicitly instead of a bare `rescue`
    # (RuboCop Style/RescueStandardError); the original API error
    # remains reachable via the new exception's #cause.
    raise "Failed to find a pull request for branch \"#{branch_name}\" on Bitbucket."
  end
end
This is a little hacky because Bitbucket doesn't provide us a PR id directly.
991
# Globs the lib/ ruby files of each resolved plugin gem inside the vendored
# gem directory. The glob patterns are evaluated relative to `dir`, so each
# match is joined back onto it to produce an absolute path.
def paths
  matches = gem_names.flat_map do |plugin|
    Dir.glob("vendor/gems/ruby/*/gems/#{plugin}*/lib/**/**/**/**.rb")
  end
  matches.map { |relative| File.join(dir, relative) }
end
The paths are relative to dir .
992
# Aborts the run with an explanatory message when the current CI build is not
# associated with a pull request; returns normally when it is. Exits with
# status 1 when fail_if_no_pr is truthy, 0 otherwise.
def validate_pr!(cork, fail_if_no_pr)
  return if EnvironmentManager.pr?(system_env)

  ci_name = EnvironmentManager.local_ci_source(system_env).name.split("::").last

  message = "Not a #{ci_name} Pull Request - skipping `danger` run. "
  # CircleCI workflows sometimes start before the PR is visible; suggest a re-run.
  message << "If you only created the PR recently, try re-running your workflow." if ci_name == "CircleCI"

  cork.puts message.strip.yellow
  exit(fail_if_no_pr ? 1 : 0)
end
Could we determine that the CI source is inside a PR?
993
# Prints the overall pass/fail status, then one section per severity
# ("Errors" first, then "Warnings") listing every triggered rule with its
# title, the object it applied to, its description, a reference link, and
# any associated files. Sections with no rules are skipped entirely.
def print_summary(ui)
  if failed?
    ui.puts "\n[!] Failed\n".red
  else
    ui.notice "Passed"
  end

  emit_section = lambda do |heading, rules|
    next if rules.empty?

    ui.puts ""
    ui.section(heading.bold) do
      rules.each do |rule|
        heading_line = rule.title.bold + " - #{rule.object_applied_to}"
        details = [rule.description, link(rule.ref)]
        details << rule.metadata[:files].join(":") if rule.metadata[:files]
        ui.labeled(heading_line, details)
        ui.puts ""
      end
    end
  end

  emit_section.call("Errors".red, errors)
  emit_section.call("Warnings".yellow, warnings)
end
Prints a summary of the errors and warnings .
994
# Rules that validate the class-level documentation of a plugin. Each Rule
# couples a severity, the line (or line range) of the reference plugin it
# points at, a short title, advice for the author, and a predicate that
# receives the plugin's JSON representation and returns true when the rule
# is VIOLATED (i.e. the documented section exists but is empty).
def class_rules
  [
    Rule.new(:error, 4..6, "Description Markdown", "Above your class you need documentation that covers the scope, and the usage of your plugin.", proc do |json|
      json[:body_md] && json[:body_md].empty?
    end),
    Rule.new(:warning, 30, "Tags", "This plugin does not include `@tags tag1, tag2` and thus will be harder to find in search.", proc do |json|
      json[:tags] && json[:tags].empty?
    end),
    Rule.new(:warning, 29, "References", "Ideally, you have a reference implementation of your plugin that you can show to people, add `@see org/repo` to have the site auto link it.", proc do |json|
      json[:see] && json[:see].empty?
    end),
    Rule.new(:error, 8..27, "Examples", "You should include some examples of common use-cases for your plugin.", proc do |json|
      json[:example_code] && json[:example_code].empty?
    end)
  ]
end
Rules that apply to a class
995
# Rules that validate the documentation of individual methods and attributes.
# Same shape as class_rules: severity, reference line(s), title, advice, and
# a predicate over the method's JSON representation that returns true when
# the rule is VIOLATED.
def method_rules
  [
    Rule.new(:error, 40..41, "Description", "You should include a description for your method.", proc do |json|
      json[:body_md] && json[:body_md].empty?
    end),
    # A nil value inside a param couplet means the @param had no [Type].
    Rule.new(:warning, 43..45, "Params", "You should give a 'type' for the param, yes, ruby is duck-typey but it's useful for newbies to the language, use `@param [Type] name`.", proc do |json|
      json[:param_couplets] && json[:param_couplets].flat_map(&:values).include?(nil)
    end),
    # "Unknown" is the placeholder the parser emits when it could not infer a type.
    Rule.new(:warning, 43..45, "Unknown Param", "You should give a 'type' for the param, yes, ruby is duck-typey but it's useful for newbies to the language, use `@param [Type] name`.", proc do |json|
      json[:param_couplets] && json[:param_couplets].flat_map(&:values).include?("Unknown")
    end),
    # Violated when there is no @return tag, or its type list is missing/blank.
    Rule.new(:warning, 46, "Return Type", "If the function has no useful return value, use ` @return [void]` - this will be ignored by documentation generators.", proc do |json|
      return_hash = json[:tags].find { |tag| tag[:name] == "return" }
      !(return_hash && return_hash[:types] && !return_hash[:types].first.empty?)
    end)
  ]
end
Rules that apply to individual methods and attributes
996
# Builds a "@see" pointer into the reference implementation (danger-prose)
# for the given rule reference: a Range links to a span of lines, an Integer
# links to a single line, and anything else links to the file itself.
def link(ref)
  case ref
  when Range
    "@see - " + "https://github.com/dbgrandi/danger-prose/blob/v2.0.0/lib/danger_plugin.rb#L#{ref.min}#-L#{ref.max}".blue
  when Integer
    "@see - " + "https://github.com/dbgrandi/danger-prose/blob/v2.0.0/lib/danger_plugin.rb#L#{ref}".blue
  else
    "@see - " + "https://github.com/dbgrandi/danger-prose/blob/v2.0.0/lib/danger_plugin.rb".blue
  end
end
Generates a link to an example of the given rule.
997
# Resolves the given references into inputs for the documentation parser.
#
# Resolution strategy:
#   - when refs contains paths of existing files, use those (absolutized);
#   - when refs is an Array (of gem names), resolve them via GemsResolver;
#   - otherwise, assume the current lib/ folder is the plugin under test.
#
# Fixes over the original: boolean logic uses && instead of the low-precedence
# `and`/`or` keywords, and the File.file? selection is computed once instead
# of twice.
#
# @return [Hash] { paths: Array<String>, gems: Array<String> }
def resolve
  # Compute the existing-file subset once (original evaluated it twice).
  file_refs = refs.nil? ? [] : refs.select { |ref| File.file?(ref) }

  gems = []
  paths =
    if file_refs.any?
      file_refs.map { |path| File.expand_path(path) }
    elsif refs.is_a?(Array)
      resolved_paths, gems = GemsResolver.new(refs).call
      resolved_paths
    else
      Dir.glob(File.join(".", "lib/**/*.rb")).map { |path| File.expand_path(path) }
    end

  { paths: paths, gems: gems || [] }
end
Takes an array of files, gems, or nothing, and resolves them into paths that should be sent to the documentation parser. When given existing paths, map them to absolute paths. When given a list of gems, resolve them to the gems' paths. When empty, imply that the current lib folder should be tested as a plugin.
998
# Determines the pull request URL attached to this CircleCI build.
# Prefers CI_PULL_REQUEST when set; otherwise reconstructs the GitHub URL
# from CIRCLE_PR_NUMBER, or falls back to asking the CircleCI API for the
# build's pull requests. Returns nil when no PR can be determined.
def pull_request_url(env)
  direct_url = env["CI_PULL_REQUEST"]
  return direct_url unless direct_url.nil?

  username = env["CIRCLE_PROJECT_USERNAME"]
  reponame = env["CIRCLE_PROJECT_REPONAME"]
  return nil if username.nil? || reponame.nil?

  repo_slug = "#{username}/#{reponame}"
  pr_number = env["CIRCLE_PR_NUMBER"]

  if pr_number.nil?
    fetch_pull_request_url(repo_slug, env["CIRCLE_BUILD_NUM"], env["DANGER_CIRCLE_CI_API_TOKEN"])
  else
    host = env["DANGER_GITHUB_HOST"] || "github.com"
    "https://#{host}/#{repo_slug}/pull/#{pr_number}"
  end
end
Determine whether there's a PR attached to this commit, and return its URL if so.
999
# Asks the CircleCI API whether the given build belongs to a pull request,
# returning the first associated PR's URL, or nil when there is none.
def fetch_pull_request_url(repo_slug, build_number, token)
  build = fetch_build(repo_slug, build_number, token)
  first_pr = build[:pull_requests].first
  first_pr && first_pr[:url]
end
Ask the API if the commit is inside a PR