idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
6,300
def extract_export_code_iruby ( placeholder = random_canvas_id , export_type = 'png' , file_name = 'chart' ) js = '' js << "\n <script>" js << "\n (function() {" js << "\n \tvar chartDom = document.getElementById('#{placeholder}');" js << "\n \tvar chart = Highcharts.charts[Highcharts.attr(chartDom," js << " 'data-high...
Returns the script to export the chart in different formats in IRuby notebook
6,301
# Merges category_command_map and command_map, flattening out the categories.
#
# Returns a single hash of command name => command definition. Entries from
# command_map win on key collisions (merge is applied onto the categorized
# commands last). Uses each_value since the category name itself is unused.
def get_all_commands_hash
  without_categories = {}
  category_command_map.each_value do |commands|
    without_categories.merge!(commands)
  end
  command_map.merge(without_categories)
end
merges category_command_map and command_map removing categories
6,302
# Run an sv command (start/stop/status/...) for a specific service name.
#
# Returns the command's exit status; disabled services are skipped and
# report 0, logging a note only for verbose `status` invocations.
def run_sv_command_for_service(sv_cmd, service_name)
  unless service_enabled?(service_name)
    log "#{service_name} disabled" if sv_cmd == "status" && verbose
    return 0
  end
  result = run_command("#{base_path}/init/#{service_name} #{sv_cmd}")
  result.exitstatus
end
run an sv command for a specific service name
6,303
# Set global option flags (@quiet / @verbose) and remove them from the args
# list we pass into commands; non-option arguments are kept.
#
# NOTE: the previous implementation used `select` whose case expression
# returned nil for unrecognized arguments, so *every* argument (options and
# non-options alike) was dropped from the returned list. `reject` with an
# explicit else branch keeps the non-option arguments as documented.
def parse_options(args)
  args.reject do |option|
    case option
    when "--quiet", "-q"
      @quiet = true
      true
    when "--verbose", "-v"
      @verbose = true
      true
    else
      false
    end
  end
end
Set global options and remove them from the args list we pass into commands .
6,304
# Retrieves the command from either the command_map or the
# category_command_map; returns nil if the command is not found.
#
# Improvement: returns as soon as a match is found instead of scanning every
# remaining category (the category name itself is never used, so iterate
# values only).
def retrieve_command(command_to_run)
  return command_map[command_to_run] if command_map.has_key?(command_to_run)
  category_command_map.each_value do |commands|
    return commands[command_to_run] if commands.has_key?(command_to_run)
  end
  nil
end
Retrieves the command from either the command_map or the category_command_map; if the command is not found, returns nil.
6,305
def run ( args ) ENV [ "PATH" ] = [ File . join ( base_path , "bin" ) , File . join ( base_path , "embedded" , "bin" ) , ENV [ 'PATH' ] ] . join ( ":" ) command_to_run = args [ 0 ] if command_to_run == "--help" command_to_run = "help" end options = args [ 2 .. - 1 ] || [ ] if is_option? ( args [ 1 ] ) options . unshift...
Previously this would exit immediately with the provided exit code; however, this would prevent post-run hooks from continuing. Instead we'll just track whether an exit was requested and use that to determine how we exit from run.
6,306
def status_post_hook ( service = nil ) if service . nil? log_external_service_header external_services . each_key do | service_name | status = send ( to_method_name ( "external_status_#{service_name}" ) , :sparse ) log status end else if service_external? ( service ) status = send ( to_method_name ( "external_status_#{...
Status gets its own hook because each externalized service will have its own things to do in order to report status. As above, we may also include an output header to show that we're reporting on external services.
6,307
def enable_rate_limit_forward ( admin_api_key , end_user_ip , rate_limit_api_key ) headers [ Protocol :: HEADER_API_KEY ] = admin_api_key headers [ Protocol :: HEADER_FORWARDED_IP ] = end_user_ip headers [ Protocol :: HEADER_FORWARDED_API_KEY ] = rate_limit_api_key end
Allow to use IP rate limit when you have a proxy between end - user and Algolia . This option will set the X - Forwarded - For HTTP header with the client IP and the X - Forwarded - API - Key with the API Key having rate limits .
6,308
def multiple_queries ( queries , options = nil , strategy = nil ) if options . is_a? ( Hash ) index_name_key = options . delete ( :index_name_key ) || options . delete ( 'index_name_key' ) strategy = options . delete ( :strategy ) || options . delete ( 'strategy' ) request_options = options . delete ( :request_options ...
This method allows to query multiple indexes with one API call
6,309
# Move an existing index to dst_index (asynchronous server-side operation).
def move_index(src_index, dst_index, request_options = {})
  body = { 'operation' => 'move', 'destination' => dst_index }
  post(Protocol.index_operation_uri(src_index), body.to_json, :write, request_options)
end
Move an existing index .
6,310
# Move an existing index and block until the move has been processed.
# Returns the API response of the underlying move operation.
def move_index!(src_index, dst_index, request_options = {})
  response = move_index(src_index, dst_index, request_options)
  wait_task(dst_index, response['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
  response
end
Move an existing index and wait until the move has been processed
6,311
# Copy an existing index to dst_index; an optional scope restricts what is
# copied (e.g. settings, synonyms, rules).
def copy_index(src_index, dst_index, scope = nil, request_options = {})
  body = { 'operation' => 'copy', 'destination' => dst_index }
  body['scope'] = scope unless scope.nil?
  post(Protocol.index_operation_uri(src_index), body.to_json, :write, request_options)
end
Copy an existing index .
6,312
# Copy an existing index and block until the copy has been processed.
# Returns the API response of the underlying copy operation.
def copy_index!(src_index, dst_index, scope = nil, request_options = {})
  response = copy_index(src_index, dst_index, scope, request_options)
  wait_task(dst_index, response['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
  response
end
Copy an existing index and wait until the copy has been processed .
6,313
def copy_settings! ( src_index , dst_index , request_options = { } ) res = copy_settings ( src_index , dst_index , request_options ) wait_task ( dst_index , res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Copy an existing index settings and wait until the copy has been processed .
6,314
def copy_synonyms! ( src_index , dst_index , request_options = { } ) res = copy_synonyms ( src_index , dst_index , request_options ) wait_task ( dst_index , res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Copy an existing index synonyms and wait until the copy has been processed .
6,315
def copy_rules! ( src_index , dst_index , request_options = { } ) res = copy_rules ( src_index , dst_index , request_options ) wait_task ( dst_index , res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Copy an existing index rules and wait until the copy has been processed .
6,316
def get_logs ( options = nil , length = nil , type = nil ) if options . is_a? ( Hash ) offset = options . delete ( 'offset' ) || options . delete ( :offset ) length = options . delete ( 'length' ) || options . delete ( :length ) type = options . delete ( 'type' ) || options . delete ( :type ) request_options = options ...
Return last logs entries .
6,317
# Send a batch request targeting multiple indices and wait for the end of
# the indexing on every impacted index.
def batch!(operations, request_options = {})
  response = batch(operations, request_options)
  response['taskID'].each do |index_name, task_id|
    wait_task(index_name, task_id, WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
  end
end
Send a batch request targeting multiple indices and wait the end of the indexing
6,318
# Check the status of an asynchronous server task; returns the 'status'
# field of the task (e.g. "published" / "notPublished").
def get_task_status(index_name, taskID, request_options = {})
  response = get(Protocol.task_uri(index_name, taskID), :read, request_options)
  response['status']
end
Check the status of a task on the server . All server task are asynchronous and you can check the status of a task with this method .
6,319
def request ( uri , method , data = nil , type = :write , request_options = { } ) exceptions = [ ] connect_timeout = @connect_timeout send_timeout = if type == :search @search_timeout elsif type == :batch type = :write @batch_timeout else @send_timeout end receive_timeout = type == :search ? @search_timeout : @receive_...
Perform an HTTP request for the given uri and method with common basic response handling . Will raise a AlgoliaProtocolError if the response has an error status code and will return the parsed JSON body on success if there is one .
6,320
def thread_local_hosts ( read ) thread_hosts_key = read ? "algolia_search_hosts_#{application_id}" : "algolia_hosts_#{application_id}" Thread . current [ thread_hosts_key ] ||= ( read ? search_hosts : hosts ) . each_with_index . map do | host , i | client = HTTPClient . new client . ssl_config . ssl_version = @ssl_vers...
This method returns a thread - local array of sessions
6,321
# Delete this index and wait until the deletion has been processed.
# Returns the API response of the underlying delete operation.
def delete!(request_options = {})
  response = delete(request_options)
  wait_task(response['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
  response
end
Delete an index and wait until the deletion has been processed
6,322
def add_object ( object , objectID = nil , request_options = { } ) check_object ( object ) if objectID . nil? || objectID . to_s . empty? client . post ( Protocol . index_uri ( name ) , object . to_json , :write , request_options ) else client . put ( Protocol . object_uri ( name , objectID ) , object . to_json , :writ...
Add an object in this index
6,323
def add_object! ( object , objectID = nil , request_options = { } ) res = add_object ( object , objectID , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Add an object in this index and wait end of indexing
6,324
def add_objects! ( objects , request_options = { } ) res = add_objects ( objects , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Add several objects in this index and wait end of indexing
6,325
def search ( query , params = { } , request_options = { } ) encoded_params = Hash [ params . map { | k , v | [ k . to_s , v . is_a? ( Array ) ? v . to_json : v ] } ] encoded_params [ :query ] = query client . post ( Protocol . search_post_uri ( name ) , { :params => Protocol . to_query ( encoded_params ) } . to_json , ...
Search inside the index
6,326
def browse ( page_or_query_parameters = nil , hits_per_page = nil , request_options = { } , & block ) params = { } if page_or_query_parameters . is_a? ( Hash ) params . merge! ( page_or_query_parameters ) else params [ :page ] = page_or_query_parameters unless page_or_query_parameters . nil? end if hits_per_page . is_a...
Browse all index content
6,327
# Browse a single page of index content starting from the given cursor.
def browse_from(cursor, hits_per_page = 1000, request_options = {})
  body = { :cursor => cursor, :hitsPerPage => hits_per_page }
  client.post(Protocol.browse_uri(name), body.to_json, :read, request_options)
end
Browse a single page from a specific cursor
6,328
def get_object ( objectID , attributes_to_retrieve = nil , request_options = { } ) attributes_to_retrieve = attributes_to_retrieve . join ( ',' ) if attributes_to_retrieve . is_a? ( Array ) if attributes_to_retrieve . nil? client . get ( Protocol . object_uri ( name , objectID , nil ) , :read , request_options ) else c...
Get an object from this index
6,329
def get_objects ( objectIDs , attributes_to_retrieve = nil , request_options = { } ) attributes_to_retrieve = attributes_to_retrieve . join ( ',' ) if attributes_to_retrieve . is_a? ( Array ) requests = objectIDs . map do | objectID | req = { :indexName => name , :objectID => objectID . to_s } req [ :attributesToRetrie...
Get a list of objects from this index
6,330
# Override the full content of an object, keyed by its objectID (taken from
# the explicit argument or extracted from the object itself).
def save_object(object, objectID = nil, request_options = {})
  uri = Protocol.object_uri(name, get_objectID(object, objectID))
  client.put(uri, object.to_json, :write, request_options)
end
Override the content of an object
6,331
def save_object! ( object , objectID = nil , request_options = { } ) res = save_object ( object , objectID , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Override the content of object and wait end of indexing
6,332
def save_objects! ( objects , request_options = { } ) res = save_objects ( objects , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Override the content of several objects and wait end of indexing
6,333
def replace_all_objects ( objects , request_options = { } ) safe = request_options [ :safe ] || request_options [ 'safe' ] || false request_options . delete ( :safe ) request_options . delete ( 'safe' ) tmp_index = @client . init_index ( @name + '_tmp_' + rand ( 10000000 ) . to_s ) responses = [ ] scope = [ 'settings' ...
Override the current objects by the given array of objects and wait end of indexing . Settings synonyms and query rules are untouched . The objects are replaced without any downtime .
6,334
# Partially override the content of several objects in one batch.
# (The misspelled parameter name `create_if_not_exits` is kept as-is for
# caller compatibility.)
def partial_update_objects(objects, create_if_not_exits = true, request_options = {})
  action = create_if_not_exits ? 'partialUpdateObject' : 'partialUpdateObjectNoCreate'
  batch(build_batch(action, objects, true), request_options)
end
Partially override the content of several objects
6,335
def partial_update_objects! ( objects , create_if_not_exits = true , request_options = { } ) res = partial_update_objects ( objects , create_if_not_exits , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Partially override the content of several objects and wait end of indexing
6,336
# Delete an object from the index.
#
# @param objectID [String] id of the object to delete; must not be nil or ''
# @param request_options [Hash] extra options forwarded to the request
# @raise [ArgumentError] when objectID is blank (guards against accidentally
#   hitting the index root and clearing it)
def delete_object(objectID, request_options = {})
  # Idiomatic raise form (class, message) instead of ArgumentError.new(...)
  raise ArgumentError, 'objectID must not be blank' if objectID.nil? || objectID == ''
  client.delete(Protocol.object_uri(name, objectID), :write, request_options)
end
Delete an object from the index
6,337
def delete_object! ( objectID , request_options = { } ) res = delete_object ( objectID , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Delete an object from the index and wait end of indexing
6,338
# Delete several objects, identified by their objectIDs, in one batch.
def delete_objects(objects, request_options = {})
  check_array(objects)
  requests = objects.map { |objectID| { :objectID => objectID } }
  batch(build_batch('deleteObject', requests, false), request_options)
end
Delete several objects
6,339
def delete_objects! ( objects , request_options = { } ) res = delete_objects ( objects , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Delete several objects and wait end of indexing
6,340
def delete_by_query ( query , params = nil , request_options = { } ) raise ArgumentError . new ( 'query cannot be nil, use the `clear` method to wipe the entire index' ) if query . nil? && params . nil? params = sanitized_delete_by_query_params ( params ) params [ :query ] = query params [ :hitsPerPage ] = 1000 params ...
Delete all objects matching a query This method retrieves all objects synchronously but deletes in batch asynchronously
6,341
# Delete all objects matching a query and wait for the end of indexing.
# The wait is skipped when the deletion returned no response.
def delete_by_query!(query, params = nil, request_options = {})
  response = delete_by_query(query, params, request_options)
  wait_task(response['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options) if response
  response
end
Delete all objects matching a query and wait end of indexing
6,342
def clear! ( request_options = { } ) res = clear ( request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Delete the index content and wait end of indexing
6,343
# Set settings for this index.
def set_settings(new_settings, options = {}, request_options = {})
  uri = Protocol.settings_uri(name, options)
  client.put(uri, new_settings.to_json, :write, request_options)
end
Set settings for this index
6,344
def set_settings! ( new_settings , options = { } , request_options = { } ) res = set_settings ( new_settings , options , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Set settings for this index and wait end of indexing
6,345
# Get settings of this index. Defaults to settings format version 2 unless
# the caller explicitly asked for a getVersion (symbol or string key).
def get_settings(options = {}, request_options = {})
  unless options[:getVersion] || options['getVersion']
    options['getVersion'] = 2
  end
  client.get(Protocol.settings_uri(name, options).to_s, :read, request_options)
end
Get settings of this index
6,346
def get_api_key ( key , request_options = { } ) client . get ( Protocol . index_key_uri ( name , key ) , :read , request_options ) end
Get ACL of a user key
6,347
def delete_api_key ( key , request_options = { } ) client . delete ( Protocol . index_key_uri ( name , key ) , :write , request_options ) end
Delete an existing user key
6,348
# Send a batch request for this index.
def batch(request, request_options = {})
  client.post(Protocol.batch_uri(name), request.to_json, :batch, request_options)
end
Send a batch request
6,349
def batch! ( request , request_options = { } ) res = batch ( request , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Send a batch request and wait the end of the indexing
6,350
# Search for values of a given facet, optionally filtered by extra search
# parameters. The caller's parameter hash is cloned, not mutated.
def search_for_facet_values(facet_name, facet_query, search_parameters = {}, request_options = {})
  body = search_parameters.clone
  body['facetQuery'] = facet_query
  client.post(Protocol.search_facet_uri(name, facet_name), body.to_json, :read, request_options)
end
Search for facet values
6,351
def search_disjunctive_faceting ( query , disjunctive_facets , params = { } , refinements = { } , request_options = { } ) raise ArgumentError . new ( 'Argument "disjunctive_facets" must be a String or an Array' ) unless disjunctive_facets . is_a? ( String ) || disjunctive_facets . is_a? ( Array ) raise ArgumentError . ...
Perform a search with disjunctive facets generating as many queries as number of disjunctive facets
6,352
def get_synonym ( objectID , request_options = { } ) client . get ( Protocol . synonym_uri ( name , objectID ) , :read , request_options ) end
Get a synonym
6,353
def delete_synonym! ( objectID , forward_to_replicas = false , request_options = { } ) res = delete_synonym ( objectID , forward_to_replicas , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Delete a synonym and wait the end of indexing
6,354
# Save a synonym object under the given objectID, optionally forwarding the
# change to replica indices.
def save_synonym(objectID, synonym, forward_to_replicas = false, request_options = {})
  uri = "#{Protocol.synonym_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}"
  client.put(uri, synonym.to_json, :write, request_options)
end
Save a synonym
6,355
def save_synonym! ( objectID , synonym , forward_to_replicas = false , request_options = { } ) res = save_synonym ( objectID , synonym , forward_to_replicas , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Save a synonym and wait the end of indexing
6,356
def clear_synonyms! ( forward_to_replicas = false , request_options = { } ) res = clear_synonyms ( forward_to_replicas , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Clear all synonyms and wait the end of indexing
6,357
# Replace all synonyms of the index with the given array of synonyms.
# forwardToReplicas may be passed in request_options (symbol or string key).
def replace_all_synonyms(synonyms, request_options = {})
  forward = request_options[:forwardToReplicas] || request_options['forwardToReplicas'] || false
  batch_synonyms(synonyms, forward, true, request_options)
end
Replace synonyms in the index by the given array of synonyms
6,358
def replace_all_synonyms! ( synonyms , request_options = { } ) res = replace_all_synonyms ( synonyms , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Replace synonyms in the index by the given array of synonyms and wait the end of indexing
6,359
def export_synonyms ( hits_per_page = 100 , request_options = { } , & _block ) res = [ ] page = 0 loop do curr = search_synonyms ( '' , { :hitsPerPage => hits_per_page , :page => page } , request_options ) [ 'hits' ] curr . each do | synonym | res << synonym yield synonym if block_given? end break if curr . size < hits...
Export the full list of synonyms Accepts an optional block to which it will pass each synonym Also returns an array with all the synonyms
6,360
def get_rule ( objectID , request_options = { } ) client . get ( Protocol . rule_uri ( name , objectID ) , :read , request_options ) end
Get a rule
6,361
# Delete a rule and wait for the end of indexing.
# Returns the API response of the underlying delete operation.
def delete_rule!(objectID, forward_to_replicas = false, request_options = {})
  response = delete_rule(objectID, forward_to_replicas, request_options)
  wait_task(response['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
  response
end
Delete a rule and wait the end of indexing
6,362
def save_rule ( objectID , rule , forward_to_replicas = false , request_options = { } ) raise ArgumentError . new ( 'objectID must not be blank' ) if objectID . nil? || objectID == '' client . put ( "#{Protocol.rule_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}" , rule . to_json , :write , request_optio...
Save a rule
6,363
def save_rule! ( objectID , rule , forward_to_replicas = false , request_options = { } ) res = save_rule ( objectID , rule , forward_to_replicas , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) return res end
Save a rule and wait the end of indexing
6,364
def clear_rules! ( forward_to_replicas = false , request_options = { } ) res = clear_rules ( forward_to_replicas , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) return res end
Clear all rules and wait the end of indexing
6,365
# Replace all query rules of the index with the given array of rules.
# forwardToReplicas may be passed in request_options (symbol or string key).
def replace_all_rules(rules, request_options = {})
  forward = request_options[:forwardToReplicas] || request_options['forwardToReplicas'] || false
  batch_rules(rules, forward, true, request_options)
end
Replace rules in the index by the given array of rules
6,366
def replace_all_rules! ( rules , request_options = { } ) res = replace_all_rules ( rules , request_options ) wait_task ( res [ 'taskID' ] , WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY , request_options ) res end
Replace rules in the index by the given array of rules and wait the end of indexing
6,367
def export_rules ( hits_per_page = 100 , request_options = { } , & _block ) res = [ ] page = 0 loop do curr = search_rules ( '' , { :hits_per_page => hits_per_page , :page => page } , request_options ) [ 'hits' ] curr . each do | rule | res << rule yield rule if block_given? end break if curr . size < hits_per_page pag...
Export the full list of rules Accepts an optional block to which it will pass each rule Also returns an array with all the rules
6,368
def hash_to_condition ( table , hash ) conditions = hash . map do | attr , value | if value . is_a? ( Array ) && value . size > 1 table [ attr ] . in ( value ) elsif value . is_a? ( Array ) table [ attr ] . eq ( value [ 0 ] ) else table [ attr ] . eq ( value ) end end conditions . reduce { | memo , cond | memo . and ( ...
Converts a hash into arel conditions
6,369
# Node generation inside DSL blocks: unknown method calls become AST nodes.
# :to_ary is delegated to super first so implicit array coercion keeps its
# normal failure behavior.
def method_missing(method_id, *args)
  super if method_id == :to_ary
  case
  when args.empty?
    Nodes::Stub.new(method_id)
  when args.size == 1 && Class === args[0]
    Nodes::Join.new(method_id, InnerJoin, args[0])
  else
    Nodes::Function.new(method_id, args)
  end
end
Node generation inside DSL blocks .
6,370
# Used by dup and clone.
#
# Rebuilds each processing step bound to the new copy (step.class.new with
# self so steps don't point back at the original), and dups content and
# url_attributes so the copy shares no mutable state with the source.
def initialize_copy(other)
  @steps = other.steps.map do |step|
    step.class.new(self, *step.args)
  end
  @content = other.content.dup
  @url_attributes = other.url_attributes.dup
end
Used by dup and clone
6,371
# Update the content from +obj+, optionally attaching extra +meta+.
#
# Wraps obj in a new TempObject (named from meta['name'] when given),
# back-fills this object's 'name' meta entry from the temp object when one
# exists, clears cached analyser results, then merges in obj's own meta (if
# it responds to :meta) followed by the explicit meta hash — so the explicit
# meta wins on collisions. Returns self for chaining.
def update(obj, meta = nil)
  meta ||= {}
  self.temp_object = TempObject.new(obj, meta['name'])
  self.meta['name'] ||= temp_object.name if temp_object.name
  clear_analyser_cache
  add_meta(obj.meta) if obj.respond_to?(:meta)
  add_meta(meta)
  self
end
Update the content
6,372
# Analyse the content by running a shell command that the caller's block
# builds from the (optionally shell-escaped) file path.
# Escaping can be disabled with :escape => false.
def shell_eval(opts = {})
  escape = opts[:escape] != false
  target = escape ? shell.escape(path) : path
  run(yield(target), :escape => escape)
end
Analyse the content using a shell command
6,373
def shell_generate ( opts = { } ) ext = opts [ :ext ] || self . ext should_escape = opts [ :escape ] != false tempfile = Utils . new_tempfile ( ext ) new_path = should_escape ? shell . escape ( tempfile . path ) : tempfile . path command = yield ( new_path ) run ( command , :escape => should_escape ) update ( tempfile ...
Set the content using a shell command
6,374
# Any method not defined on the standard Logger class is forwarded to every
# registered logger that responds to it.
#
# Fix: the arguments are now splatted (*args) when forwarded. The previous
# implementation passed the args array as one single argument, so e.g.
# log.info("a", "b") reached each logger as info(["a", "b"]).
def method_missing(name, *args, &block)
  @loggers.each do |logger|
    logger.send(name, *args, &block) if logger.respond_to?(name)
  end
end
Any method not defined on standard Logger class just send it on to anyone who will listen
6,375
def buffer_initialize ( options = { } ) if ! self . class . method_defined? ( :flush ) raise ArgumentError , "Any class including Stud::Buffer must define a flush() method." end @buffer_config = { :max_items => options [ :max_items ] || 50 , :max_interval => options [ :max_interval ] || 5 , :logger => options [ :logger...
Initialize the buffer .
6,376
def buffer_receive ( event , group = nil ) buffer_initialize if ! @buffer_state while buffer_full? do on_full_buffer_receive ( :pending => @buffer_state [ :pending_count ] , :outgoing => @buffer_state [ :outgoing_count ] ) if @buffer_config [ :has_on_full_buffer_receive ] if @buffer_config [ :drop_messages_on_full_buff...
Save an event for later delivery
6,377
def buffer_flush ( options = { } ) force = options [ :force ] || options [ :final ] final = options [ :final ] if options [ :final ] @buffer_state [ :flush_mutex ] . lock elsif ! @buffer_state [ :flush_mutex ] . try_lock return 0 end items_flushed = 0 begin time_since_last_flush = ( Time . now - @buffer_state [ :last_f...
Try to flush events .
6,378
# Iterate over each tuple in the set, yielding every distinct (coerced)
# tuple exactly once. Returns an Enumerator without a block, self otherwise.
def each
  return to_enum unless block_given?
  seen = {}
  tuples.each do |raw|
    coerced = Tuple.coerce(header, raw)
    next if seen.key?(coerced)
    seen[coerced] = coerced
    yield coerced
  end
  self
end
Iterate over each tuple in the set
6,379
# Return a relation that represents a replacement of this relation by the
# other: delete the tuples missing from other, insert the tuples new in it.
def replace(other)
  coerced = coerce(other)
  delete(difference(coerced)).insert(coerced.difference(self))
end
Return a relation that represents a replacement of a relation
6,380
def define_inheritable_alias_method ( new_method , original_method ) define_method ( new_method ) do | * args , & block | public_send ( original_method , * args , & block ) end end
Create a new method alias for the original method
6,381
def extend ( header , extensions ) join ( header , extensions . map { | extension | Function . extract_value ( extension , self ) } ) end
Extend a tuple with function results
6,382
def predicate header . reduce ( Function :: Proposition :: Tautology . instance ) do | predicate , attribute | predicate . and ( attribute . eq ( attribute . call ( self ) ) ) end end
Return the predicate matching the tuple
6,383
def run_task ( _verbose ) data . each do | path , string | opts = { } path = File . expand_path ( path ) if string . is_a? ( Hash ) input = string [ :input ] opts [ :patterns ] = string [ :patterns ] if string . key? :patterns opts [ :color ] = string [ :color ] if string . key? :color opts [ :base_color ] = string [ :...
Create a new geo pattern task
6,384
# Include one or more modules into this instance's class.
# Accepts a single module or an array of modules.
def include(modules)
  Array(modules).each { |mod| self.class.include(mod) }
end
Include module in instance
6,385
def authentication_path ( local_port : nil , invite_code : nil , expires_in : nil , remote : false ) auth_url_params = { } if remote auth_url_params [ :redirect_uri ] = "/code" elsif local_port auth_url_params [ :redirect_uri ] = "http://localhost:#{local_port}/cb" else raise ArgumentError , "Local port not defined and...
Build a path for master authentication
6,386
def authentication_url_from_master ( master_url , auth_params ) client = Kontena :: Client . new ( master_url ) vspinner "Sending authentication request to receive an authorization URL" do response = client . request ( http_method : :get , path : authentication_path ( auth_params ) , expects : [ 501 , 400 , 302 , 403 ]...
Request a redirect to the authentication url from master
6,387
def select_a_server ( name , url ) if url . nil? && name . nil? if config . current_master return config . current_master else exit_with_error 'URL not specified and current master not selected' end end if name && url exact_match = config . find_server_by ( url : url , name : name ) return exact_match if exact_match na...
Figure out or create a server based on url or name .
6,388
def ensure_exposed ( cidr ) if @executor_pool . expose ( cidr ) info "Exposed host node at cidr=#{cidr}" else error "Failed to expose host node at cidr=#{cidr}" end @executor_pool . ps ( 'weave:expose' ) do | name , mac , * cidrs | cidrs . each do | exposed_cidr | if exposed_cidr != cidr warn "Migrating host node from ...
Ensure that the host weave bridge is exposed using the given CIDR address and only the given CIDR address
6,389
# Inspect the current state of attached containers.
# Returns a hash of container id => attached CIDR addresses, skipping the
# host's own 'weave:expose' entry.
def get_containers
  containers = {}
  @executor_pool.ps do |id, mac, *cidrs|
    containers[id] = cidrs unless id == 'weave:expose'
  end
  containers
end
Inspect current state of attached containers
6,390
# Attach a container to weave with the given CIDR, first detaching any
# currently-attached addresses that do not match it.
def migrate_container(container_id, cidr, attached_cidrs)
  stale = attached_cidrs.reject { |attached| attached == cidr }
  stale.each do |attached|
    warn "Migrate container=#{container_id} from cidr=#{attached}"
    @executor_pool.detach(container_id, attached)
  end
  self.attach_container(container_id, cidr)
end
Attach container to weave with given CIDR address first detaching any existing mismatching addresses
6,391
def remove_container ( container_id , overlay_network , overlay_cidr ) info "Remove container=#{container_id} from network=#{overlay_network} at cidr=#{overlay_cidr}" @ipam_client . release_address ( overlay_network , overlay_cidr ) rescue IpamError => error warn "Failed to release container=#{container_id} from networ...
Remove container from weave network
6,392
def start_container ( container ) overlay_cidr = container . overlay_cidr if overlay_cidr wait_weave_running? register_container_dns ( container ) if container . service_container? attach_overlay ( container ) else debug "skip start for container=#{container.name} without overlay_cidr" end rescue Docker :: Error :: Not...
Ensure weave network for container
6,393
# Initialize the plugin manager.
#
# Points rubygems (GEM_HOME / Gem.paths) at the plugin install directory,
# silences the rubygems UI unless Kontena debug mode is on, loads the
# available plugins, and returns true.
def init
  ENV["GEM_HOME"] = Common.install_dir
  Gem.paths = ENV
  Common.use_dummy_ui unless Kontena.debug?
  plugins
  true
end
Initialize plugin manager
6,394
def debug ( message = nil , & block ) logger . add ( Logger :: DEBUG , message , self . logging_prefix , & block ) end
Send a debug message
6,395
def info ( message = nil , & block ) logger . add ( Logger :: INFO , message , self . logging_prefix , & block ) end
Send a info message
6,396
def warn ( message = nil , & block ) logger . add ( Logger :: WARN , message , self . logging_prefix , & block ) end
Send a warning message
6,397
def error ( message = nil , & block ) logger . add ( Logger :: ERROR , message , self . logging_prefix , & block ) end
Send an error message
6,398
def add_error ( key , error , message = nil ) if error . is_a? Symbol error = ErrorAtom . new ( key , error , message : message ) elsif error . is_a? ( Mutations :: ErrorAtom ) || error . is_a? ( Mutations :: ErrorArray ) || error . is_a? ( Mutations :: ErrorHash ) else raise ArgumentError . new ( "Invalid error of kin...
Add error for a key
6,399
def websocket_exec_write_thread ( ws , tty : nil ) Thread . new do begin if tty console_height , console_width = TTY :: Screen . size websocket_exec_write ( ws , 'tty_size' => { width : console_width , height : console_height } ) end read_stdin ( tty : tty ) do | stdin | logger . debug "websocket exec stdin with encodi...
Start thread to read from stdin and write to websocket . Closes websocket on stdin read errors .