_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3
values | text stringlengths 66 10.5k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q8000 | PuppetDBQuery.MongoDB.node_properties_update | train | def node_properties_update(new_node_properties)
collection = connection[node_properties_collection]
old_names = collection.find.batch_size(999).projection(_id: 1).map { |k| k[:_id] }
delete = old_names - new_node_properties.keys
data = new_node_properties.map do |k, v|
{
replace_one:
{
filter: { _id: k },
replacement: v,
upsert: true
}
}
end
collection.bulk_write(data)
collection.delete_many(_id: { '$in' => delete })
end | ruby | {
"resource": ""
} |
q8001 | PuppetDBQuery.MongoDB.meta_fact_update | train | def meta_fact_update(method, ts_begin, ts_end)
connection[meta_collection].find_one_and_update(
{},
{
'$set' => {
last_fact_update: {
ts_begin: ts_begin.iso8601,
ts_end: ts_end.iso8601,
method: method
},
method => {
ts_begin: ts_begin.iso8601,
ts_end: ts_end.iso8601
}
}
},
{ upsert: true }
)
end | ruby | {
"resource": ""
} |
q8002 | PuppetDBQuery.MongoDB.meta_node_properties_update | train | def meta_node_properties_update(ts_begin, ts_end)
connection[meta_collection].find_one_and_update(
{},
{
'$set' => {
last_node_properties_update: {
ts_begin: ts_begin.iso8601,
ts_end: ts_end.iso8601
}
}
},
{ upsert: true }
)
@node_properties_update_timestamp = ts_begin
end | ruby | {
"resource": ""
} |
q8003 | Idata.Detector.find_same_occurence | train | def find_same_occurence
selected = @candidates.select { |delim, count|
begin
CSV.parse(@sample, col_sep: delim).select{|e| !e.empty? }.map{|e| e.count}.uniq.count == 1
rescue Exception => ex
false
end
}.keys
return selected.first if selected.count == 1
return DEFAULT_DELIMITER if selected.include?(DEFAULT_DELIMITER)
end | ruby | {
"resource": ""
} |
q8004 | DuckMap.Route.keys_required? | train | def keys_required?
keys = self.segment_keys.dup
keys.delete(:format)
return keys.length > 0 ? true : false
end | ruby | {
"resource": ""
} |
q8005 | Ork.ResultSet.next_page | train | def next_page
raise Ork::NoNextPage.new 'There is no next page' unless has_next_page?
self.class.new(@model,
@index,
@query,
@options.merge(continuation: keys.continuation))
end | ruby | {
"resource": ""
} |
q8006 | Statixite.CloudSync.threaded_run! | train | def threaded_run!(files, change)
return if files.empty?
file_number = 0
total_files = files.length
mutex = Mutex.new
threads = []
5.times do |i|
threads[i] = Thread.new {
until files.empty?
mutex.synchronize do
file_number += 1
Thread.current["file_number"] = file_number
end
file = files.pop rescue nil
next unless file
Rails.logger.info "[#{Thread.current["file_number"]}/#{total_files}] to #{change}..."
case change
when 'destroy'
when 'create'
when 'update'
end
end
}
end
threads.each { |t| t.join }
end | ruby | {
"resource": ""
} |
q8007 | Rattler::Compiler.RubyGenerator.intersperse | train | def intersperse(enum, opts={})
sep = opts[:sep]
newlines = opts[:newlines] || (opts[:newline] ? 1 : 0)
enum.each_with_index do |_, i|
if i > 0
self << sep if sep
newlines.times { newline }
end
yield _
end
self
end | ruby | {
"resource": ""
} |
q8008 | Clomp.Operation.get_status | train | def get_status
@result['tracks'].collect {|track| track.name if track.failure?}.compact.count.zero? ? 'Success' : 'Failure'
end | ruby | {
"resource": ""
} |
q8009 | FixedWidthFileValidator.RecordFormatter.formatted_value | train | def formatted_value(value, format, width)
value_str = value ? format(format, value) : ' ' * width # all space for nil value
length = value_str.length
if length > width
value_str.slice(0..width - 1)
elsif length < width
' ' * (width - length) + value_str
else
value_str
end
end | ruby | {
"resource": ""
} |
q8010 | JBLAS.MatrixEnumMixin.map! | train | def map!(&block)
(0...length).each do |i|
put(i, block.call(get(i)))
end
self
end | ruby | {
"resource": ""
} |
q8011 | Lanyon.Router.endpoint | train | def endpoint(path)
normalized = normalize_path_info(path)
fullpath = File.join(@root, normalized)
endpoint = if FileTest.file?(fullpath)
fullpath
elsif needs_redirect_to_dir?(fullpath)
:must_redirect
elsif FileTest.file?(fullpath_html = "#{fullpath}.html")
fullpath_html
else
:not_found
end
endpoint
end | ruby | {
"resource": ""
} |
q8012 | Lanyon.Router.custom_404_body | train | def custom_404_body
filename = File.join(root, "404.html")
File.exist?(filename) ? File.binread(filename) : nil
end | ruby | {
"resource": ""
} |
q8013 | GiantBomb.Search.filter | train | def filter(conditions)
if conditions
conditions.each do |key, value|
if self.respond_to?(key)
self.send(key, value)
end
end
end
end | ruby | {
"resource": ""
} |
q8014 | NanocConrefFS.Ancestry.find_array_parents | train | def find_array_parents(toc, title)
parents = ''
toc.each do |item|
if item.is_a?(Hash)
parents = find_hash_parents(item, title)
break unless parents.empty?
end
end
parents
end | ruby | {
"resource": ""
} |
q8015 | NanocConrefFS.Ancestry.find_hash_parents | train | def find_hash_parents(toc, title)
parents = ''
toc.each_key do |key|
next if toc[key].nil?
toc[key].each do |item|
if item.is_a?(Hash)
if item.keys.include?(title)
parents = key
break
else
if item[item.keys.first].include?(title)
parents = key
break
end
end
elsif title == item
parents = key
break
end
end
break unless parents.empty?
end
parents
end | ruby | {
"resource": ""
} |
q8016 | NanocConrefFS.Ancestry.find_array_children | train | def find_array_children(toc, title)
toc.each do |item|
next unless item.is_a?(Hash)
item.each_pair do |key, values|
if key == title
children = values.flatten
return children
end
end
end
# Found no children
Array.new
end | ruby | {
"resource": ""
} |
q8017 | NanocConrefFS.Ancestry.find_hash_children | train | def find_hash_children(toc, title)
toc.each_key do |key|
next if toc[key].nil?
toc[key].each do |item|
next unless item.is_a?(Hash)
if item[title]
children = item.values.flatten
return children
end
end
end
# Found no children
Array.new
end | ruby | {
"resource": ""
} |
q8018 | Hatt.ApiClients.hatt_add_service | train | def hatt_add_service(name, url_or_svc_cfg_hash)
svc_cfg = case url_or_svc_cfg_hash
when String
{ 'url' => url_or_svc_cfg_hash }
when Hash
url_or_svc_cfg_hash
else
raise ArgumentError, "'#{url_or_svc_cfg_hash}' is not a url string nor hash with url key"
end
init_config
services_config = hatt_configuration['hatt_services']
services_config[name] = svc_cfg
@hatt_configuration.tcfg_set 'hatt_services', services_config
@hatt_http_clients ||= {}
@hatt_http_clients[name] = Hatt::HTTP.new hatt_configuration['hatt_services'][name]
define_singleton_method name.intern do
@hatt_http_clients[name]
end
end | ruby | {
"resource": ""
} |
q8019 | ParcelApi.Address.details | train | def details(address_id)
details_url = File.join(DOMESTIC_URL, address_id.to_s)
response = @connection.get details_url
OpenStruct.new(response.parsed['address'])
end | ruby | {
"resource": ""
} |
q8020 | ParcelApi.Address.australian_details | train | def australian_details(address_id)
details_url = File.join(AUSTRALIAN_URL, address_id.to_s)
response = @connection.get details_url
RecursiveOpenStruct.new(response.parsed['address'], recurse_over_arrays: true)
end | ruby | {
"resource": ""
} |
q8021 | ParcelApi.Address.international_search | train | def international_search(query, count=5, country_code=nil)
return [] if query.length < 4
response = @connection.get INTERNATIONAL_URL, params: { q: query.to_ascii, count: count, country_code: country_code }
response.parsed['addresses'].map {|address| OpenStruct.new(address)}
end | ruby | {
"resource": ""
} |
q8022 | ParcelApi.Address.international_details | train | def international_details(address_id)
details_url = File.join(INTERNATIONAL_URL, address_id.to_s)
response = @connection.get details_url
RecursiveOpenStruct.new(response.parsed['result'], recurse_over_arrays: true)
end | ruby | {
"resource": ""
} |
q8023 | ActionKitConnector.Connector.find_petition_pages | train | def find_petition_pages(name, limit: 10, offset: 0)
target = "#{self.base_url}/petitionpage/"
options = {
basic_auth: self.auth,
query: {
_limit: limit,
_offset: offset,
name: name
}
}
self.class.get(target, options)
end | ruby | {
"resource": ""
} |
q8024 | ActionKitConnector.Connector.create_petition_page | train | def create_petition_page(name, title, lang, canonical_url)
target = "#{self.base_url}/petitionpage/"
options = {
basic_auth: self.auth,
headers: {
'Content-type' => 'application/json; charset=UTF-8'
},
:body => {
:type => 'petitionpage',
:hidden => false,
:name => name,
:title => title,
:language => lang,
:canonical_url => canonical_url
}.to_json,
format: :json
}
self.class.post(target, options)
end | ruby | {
"resource": ""
} |
q8025 | ActionKitConnector.Connector.create_action | train | def create_action(page_name, email, options={})
target = "#{self.base_url}/action/"
body = { page: page_name, email: email }.merge self.parse_action_options(options)
options = {
basic_auth: self.auth,
body: body.to_json,
format: :json,
headers: {'Content-Type' => 'application/json; charset=UTF-8'}
}
self.class.post(target, options)
end | ruby | {
"resource": ""
} |
q8026 | ActionKitConnector.Connector.create_donation_action | train | def create_donation_action(options={})
target = "#{self.base_url}/donationpush/"
options = self.validate_donation_options(options)
page_opts = {
basic_auth: self.auth,
body: options.to_json,
headers: {
'Content-Type' => 'application/json; charset=UTF-8'
}
}
self.class.post(target, page_opts)
end | ruby | {
"resource": ""
} |
q8027 | ActionKitConnector.Connector.list_users | train | def list_users(offset=0, limit=20)
target = "#{self.base_url}/user/"
options = {
basic_auth: self.auth,
query: {
_limit: limit,
_offset: offset
}
}
self.class.get(target, options)
end | ruby | {
"resource": ""
} |
q8028 | Retrospec.Config.setup_config_file | train | def setup_config_file(file=nil)
if file.nil? or ! File.exists?(file)
# config does not exist
setup_config_dir
dst_file = File.join(default_retrospec_dir, 'config.yaml')
src_file = File.join(gem_dir,'config.yaml.sample')
safe_copy_file(src_file, dst_file)
file = dst_file
end
@config_file = file
end | ruby | {
"resource": ""
} |
q8029 | SpeakyCsv.ConfigBuilder.field | train | def field(*fields, export_only: false)
@config.fields += fields.map(&:to_sym)
@config.fields.uniq!
if export_only
@config.export_only_fields += fields.map(&:to_sym)
@config.export_only_fields.uniq!
end
nil
end | ruby | {
"resource": ""
} |
q8030 | SpeakyCsv.ConfigBuilder.has_one | train | def has_one(name)
@config.root or raise NotImplementedError, "nested associations are not supported"
@config.has_ones[name.to_sym] ||= Config.new
yield self.class.new config: @config.has_ones[name.to_sym], root: false
nil
end | ruby | {
"resource": ""
} |
q8031 | SpeakyCsv.ConfigBuilder.has_many | train | def has_many(name)
@config.root or raise NotImplementedError, "nested associations are not supported"
@config.has_manys[name.to_sym] ||= Config.new
yield self.class.new config: @config.has_manys[name.to_sym], root: false
nil
end | ruby | {
"resource": ""
} |
q8032 | IceCubeCron.RuleBuilder.build_rule | train | def build_rule(expression)
rule = build_root_recurrence_rule(expression)
rule = build_year_rules(rule, expression)
rule = build_weekday_rule(rule, expression)
rule = build_day_rules(rule, expression)
rule = build_time_rules(rule, expression)
rule = rule.until(expression.until) unless expression.until.blank?
rule
end | ruby | {
"resource": ""
} |
q8033 | Hatt.JsonHelpers.jsonify | train | def jsonify(obj)
case obj
when String
JSON.pretty_generate(JSON.parse(obj))
when Hash, Array
JSON.pretty_generate(obj)
else
obj.to_s
end
rescue Exception
obj.to_s
end | ruby | {
"resource": ""
} |
q8034 | Hatt.JsonHelpers.objectify | train | def objectify(json_string)
return nil if json_string.nil? || json_string == ''
case json_string
when Hash, Array
return json_string
else
JSON.parse(json_string.to_s)
end
rescue Exception
json_string
end | ruby | {
"resource": ""
} |
q8035 | Podbay.Utils.count_values | train | def count_values(*values)
values.inject(Hash.new(0)) { |h, v| h[v] += 1; h }
end | ruby | {
"resource": ""
} |
q8036 | Podbay.Utils.get_uid | train | def get_uid(username)
Etc.passwd { |u| return u.uid if u.name == username }
end | ruby | {
"resource": ""
} |
q8037 | Podbay.Utils.get_gid | train | def get_gid(group_name)
Etc.group { |g| return g.gid if g.name == group_name }
end | ruby | {
"resource": ""
} |
q8038 | Podbay.Utils.podbay_info | train | def podbay_info(ip_address, path, timeout = 5)
JSON.parse(
get_request(
"http://#{ip_address}:#{Podbay::SERVER_INFO_PORT}/#{path}",
timeout: timeout
).body,
symbolize_names: true
)
end | ruby | {
"resource": ""
} |
q8039 | PuppetDBQuery.Updater.update2 | train | def update2
update_node_properties
logger.info "update2 started (full update)"
tsb = Time.now
source_nodes = source_node_properties.keys
destination_nodes = destination.all_nodes
delete_missing(destination_nodes, source_nodes)
errors = false
complete = source.facts
complete.each do |node, facts|
begin
destination.node_update(node, facts)
rescue
errors = true
logger.error $!
end
end
tse = Time.now
logger.info "update2 updated #{source_nodes.size} nodes in #{tse - tsb}"
destination.meta_fact_update("update2", tsb, tse) unless errors
end | ruby | {
"resource": ""
} |
q8040 | PuppetDBQuery.Updater.update_node_properties | train | def update_node_properties
logger.info "update_node_properties started"
tsb = Time.now
@source_node_properties = source.node_properties
destination.node_properties_update(source_node_properties)
tse = Time.now
logger.info "update_node_properties got #{source_node_properties.size} nodes " \
"in #{tse - tsb}"
destination.meta_node_properties_update(tsb, tse)
end | ruby | {
"resource": ""
} |
q8041 | Rack::AcceptHeaders.Encoding.matches | train | def matches(encoding)
values.select {|v|
v == encoding || v == '*'
}.sort {|a, b|
# "*" gets least precedence, any others should be equal.
a == '*' ? 1 : (b == '*' ? -1 : 0)
}
end | ruby | {
"resource": ""
} |
q8042 | Selections.BelongsToSelection.belongs_to_selection | train | def belongs_to_selection(target, options={})
belongs_to target, options.merge(:class_name => "Selection")
# The "selections" table may not exist during certain rake scenarios such as db:migrate or db:reset.
if ActiveRecord::Base.connection.table_exists? Selection.table_name
prefix = self.name.downcase
parent = Selection.where(system_code: "#{prefix}_#{target}").first
if parent
target_id = "#{target}_id".to_sym
parent.children.each do |s|
method_name = "#{s.system_code.sub("#{prefix}_", '')}?".to_sym
class_eval do
define_method method_name do
send(target_id) == s.id
end
end
end
end
end
end | ruby | {
"resource": ""
} |
q8043 | HtmlGrid.Composite.full_colspan | train | def full_colspan
raw_span = components.keys.collect{ |key|
key.at(0)
}.max.to_i
(raw_span > 0) ? raw_span + 1 : nil
end | ruby | {
"resource": ""
} |
q8044 | EM::Xmpp.Entity.subscribe | train | def subscribe(&blk)
pres = connection.presence_stanza('to'=>jid.bare, 'type' => 'subscribe')
connection.send_stanza pres, &blk
end | ruby | {
"resource": ""
} |
q8045 | EM::Xmpp.Entity.accept_subscription | train | def accept_subscription(&blk)
pres = connection.presence_stanza('to'=>jid.bare, 'type' => 'subscribed')
connection.send_stanza pres, &blk
end | ruby | {
"resource": ""
} |
q8046 | EM::Xmpp.Entity.unsubscribe | train | def unsubscribe(&blk)
pres = connection.presence_stanza('to'=>jid.bare, 'type' => 'unsubscribe')
connection.send_stanza pres, &blk
end | ruby | {
"resource": ""
} |
q8047 | EM::Xmpp.Entity.pubsub | train | def pubsub(nid=nil)
node_jid = if nid
JID.new(jid.node, jid.domain, nid)
else
jid.to_s
end
PubSub.new(connection, node_jid)
end | ruby | {
"resource": ""
} |
q8048 | EM::Xmpp.Entity.muc | train | def muc(nick=nil)
muc_jid = JID.new jid.node, jid.domain, nick
Muc.new(connection, muc_jid)
end | ruby | {
"resource": ""
} |
q8049 | EstoreConventions.ArchivedOutliers.versions_average_for_attribute | train | def versions_average_for_attribute(att, opts={})
_use_delta = opts[:delta] || false
if _use_delta
return historical_rate_per_day(att, nil, nil)
else
data = versions_complete_data_for_attribute(att, opts)
return data.e_mean
end
end | ruby | {
"resource": ""
} |
q8050 | Jia.User.phone | train | def phone
@phone ||= -> {
mac = Jia::Utils.load_data('phone_mac').sample
area_code = rand(9999).to_s.center(4, rand(9).to_s)
user_identifier = rand(9999).to_s.center(4, rand(9).to_s)
"#{mac}#{area_code}#{user_identifier}"
}.call
end | ruby | {
"resource": ""
} |
q8051 | Cyclical.Rule.match? | train | def match?(time, base)
aligned?(time, base) && @filters.all? { |f| f.match?(time) }
end | ruby | {
"resource": ""
} |
q8052 | Cyclical.Rule.potential_previous | train | def potential_previous(current, base)
@filters.map { |f| f.previous(current) }.min || current
end | ruby | {
"resource": ""
} |
q8053 | Bixby.Hashify.to_hash | train | def to_hash
self.instance_variables.inject({}) { |m,v| m[v[1,v.length].to_sym] = instance_variable_get(v); m }
end | ruby | {
"resource": ""
} |
q8054 | Coals.TaskTree.build_tasks | train | def build_tasks
load_rakefile
Rake.application.tasks.reject { |t| t.comment.nil? }
end | ruby | {
"resource": ""
} |
q8055 | LatoBlog.Post.check_lato_blog_post_parent | train | def check_lato_blog_post_parent
post_parent = LatoBlog::PostParent.find_by(id: lato_blog_post_parent_id)
if !post_parent
errors.add('Post parent', 'not exist for the post')
throw :abort
return
end
same_language_post = post_parent.posts.find_by(meta_language: meta_language)
if same_language_post && same_language_post.id != id
errors.add('Post parent', 'has another post for the same language')
throw :abort
return
end
end | ruby | {
"resource": ""
} |
q8056 | LatoBlog.Post.add_to_default_category | train | def add_to_default_category
default_category_parent = LatoBlog::CategoryParent.find_by(meta_default: true)
return unless default_category_parent
category = default_category_parent.categories.find_by(meta_language: meta_language)
return unless category
LatoBlog::CategoryPost.create(lato_blog_post_id: id, lato_blog_category_id: category.id)
end | ruby | {
"resource": ""
} |
q8057 | Scalaroid.JSONConnection.call | train | def call(function, params)
start
req = Net::HTTP::Post.new(DEFAULT_PATH)
req.add_field('Content-Type', 'application/json; charset=utf-8')
req.body = URI::encode({
:jsonrpc => :'2.0',
:method => function,
:params => params,
:id => 0 }.to_json({:ascii_only => true}))
begin
res = @conn.request(req)
if res.is_a?(Net::HTTPSuccess)
data = res.body
return JSON.parse(data)['result']
else
raise ConnectionError.new(res)
end
rescue ConnectionError => error
raise error
rescue Exception => error
raise ConnectionError.new(error)
end
end | ruby | {
"resource": ""
} |
q8058 | Caliph.CommandLine.string_format | train | def string_format
(command_environment.map do |key, value|
[key, value].join("=")
end + [command]).join(" ")
end | ruby | {
"resource": ""
} |
q8059 | LittleLogFriend.Formatter.call | train | def call ( severity, time, progname, msg )
msg = Format % [format_datetime(time), severity, $$, progname, msg2str(msg)]
msg = @@colors[severity] + msg + @@colors['DEFAULT'] if @@colorize
msg << "\n"
end | ruby | {
"resource": ""
} |
q8060 | HailHydra.TPB.search | train | def search(query, pages=1, orderby=99)
get = make_search_request(query, pages, orderby)
raise "Invalid response: #{get.response.code}" unless get.response.code == "200"
return parse_search_results(get.response.body)
end | ruby | {
"resource": ""
} |
q8061 | Skittles.Request.post | train | def post(path, options = {}, headers = {}, raw = false)
request(:post, path, options, headers, raw)
end | ruby | {
"resource": ""
} |
q8062 | Skittles.Request.put | train | def put(path, options = {}, headers = {}, raw = false)
request(:put, path, options, headers, raw)
end | ruby | {
"resource": ""
} |
q8063 | Skittles.Request.delete | train | def delete(path, options = {}, headers = {}, raw = false)
request(:delete, path, options, headers, raw)
end | ruby | {
"resource": ""
} |
q8064 | Ablerc.Option.to_stub | train | def to_stub
stub = "## #{name}\n"
stub << "# #{description}\n" unless description.nil?
stub << "#{entry_for_refuse_allow_behavior}\n" unless refuses.nil? and allows.nil?
stub << "#{entry_for_key_value}\n"
stub << "\n"
end | ruby | {
"resource": ""
} |
q8065 | Notifaction.Style.format | train | def format(message, colour = nil, style = nil)
c = @map[:colour][colour.to_sym] unless colour.nil?
if style.nil?
t = 0
else
t = @map[:style][style.to_sym]
end
"\e[#{t};#{c}m#{message}\e[0m"
end | ruby | {
"resource": ""
} |
q8066 | Vigilem::FFI.ArrayPointerSync.update | train | def update
if (results = what_changed?)[:ary]
update_ptr
update_ary_cache
true
elsif results[:ptr]
update_ary
update_ptr_cache
true
else
false
end
end | ruby | {
"resource": ""
} |
q8067 | Vigilem::FFI.ArrayPointerSync.update_ptr | train | def update_ptr
ptr.clear
if (not (arry_type = self.class.ary_type).is_a?(Symbol))
if arry_type.respond_to? :to_native
ary.each {|item| ptr.write_pointer(arry_type.to_native(item, nil)) }
elsif arry_type.method_defined? :bytes
ptr.write_bytes(ary.map {|item| item.respond.bytes }.join)
elsif arry_type.method_defined? :pointer
ary.each do |item|
if item.size == item.pointer.size
ptr.write_bytes((itm_ptr = item.pointer).read_bytes(itm_ptr.size))
else
raise ArgumentError, "Cannot reliably convert `#{item}' to a native_type"
end
end
else
raise ArgumentError, "Cannot reliably convert `#{arry_type}' to a native_type"
end
else
Utils.put_array_typedef(ptr, arry_type, ary)
end
update_ptr_cache
#self.ptr_cache_hash = @bytes.hash # @FIXME ptr_hash() and @bytes.hash should be the same...
end | ruby | {
"resource": ""
} |
q8068 | ESS.Element.method_missing | train | def method_missing m, *args, &block
if method_name_is_tag_name? m
return assign_tag(m, args, &block)
elsif method_name_is_tag_adder_method? m
return extend_tag_list(m, args, &block)
elsif method_name_is_tag_list_method? m
return child_tags[m[0..-6].to_sym] ||= []
elsif method_name_is_attr_accessor_method? m
return assign_attribute(m[0..-6].to_sym, args, &block)
end
super(m, *args, &block)
end | ruby | {
"resource": ""
} |
q8069 | MediaWiki.Page.summary | train | def summary
text_array = to_text.split("\n")
text = text_array[0]
i = 1
while text.length <= 140 && i < text_array.length
text << "\n" + text_array[i]
i += 1
end
text
end | ruby | {
"resource": ""
} |
q8070 | OauthProviderEngine.RequestToken.upgrade! | train | def upgrade!
access_token = nil
transaction do
access_token = OauthProviderEngine::AccessToken.create!({
:application_id => self.application_id,
:user_id => self.user_id,
})
self.destroy || raise(ActiveRecord::Rollback)
end
return access_token
end | ruby | {
"resource": ""
} |
q8071 | SortIndex.File.sorted_puts | train | def sorted_puts(line)
if line == nil || line.size == 0
raise ArgumentError, 'Line cannot be blank!'
end
if line.index($/)
raise ArgumentError, "Cannot `puts` a line with extra line endings. Make sure the line does not contain `#{$/.inspect}`"
end
matched, idx = binary_seek(line)
if matched
# an exact match was found, nothing to do
else
if idx == nil
# append to end of file
self.seek(0, IO::SEEK_END)
puts(line)
else
self.seek(cached_positions[idx][0], IO::SEEK_SET)
do_at_current_position{puts(line)}
end
update_cached_position(idx, line)
end
nil
end | ruby | {
"resource": ""
} |
q8072 | SortIndex.File.index_each_line | train | def index_each_line
positions = []
size = 0
each_line do |line|
positions << [size, line.size]
size += line.size
end
rewind
positions
end | ruby | {
"resource": ""
} |
q8073 | SortIndex.File.do_at_current_position | train | def do_at_current_position(&block)
current_position = self.tell
huge_buffer = self.read
self.seek(current_position, IO::SEEK_SET)
block.call
ensure
self.write huge_buffer
end | ruby | {
"resource": ""
} |
q8074 | TheBigDB.Request.prepare | train | def prepare(method, request_uri, params = {})
method = method.downcase.to_s
if TheBigDB.api_key.is_a?(String) and !TheBigDB.api_key.empty?
params.merge!("api_key" => TheBigDB.api_key)
end
# we add the API version to the URL, with a trailing slash and the rest of the request
request_uri = "/v#{TheBigDB.api_version}" + (request_uri.start_with?("/") ? request_uri : "/#{request_uri}")
if method == "get"
encoded_params = TheBigDB::Helpers::serialize_query_params(params)
@http_request = Net::HTTP::Get.new(request_uri + "?" + encoded_params)
elsif method == "post"
@http_request = Net::HTTP::Post.new(request_uri)
@http_request.set_form_data(TheBigDB::Helpers::flatten_params_keys(params))
else
raise ArgumentError, "The request method must be 'get' or 'post'"
end
@http_request["user-agent"] = "TheBigDB RubyWrapper/#{TheBigDB::VERSION::STRING}"
client_user_agent = {
"publisher" => "thebigdb",
"version" => TheBigDB::VERSION::STRING,
"language" => "ruby",
"language_version" => "#{RUBY_VERSION} p#{RUBY_PATCHLEVEL} (#{RUBY_RELEASE_DATE})",
}
@http_request["X-TheBigDB-Client-User-Agent"] = JSON(client_user_agent)
self
end | ruby | {
"resource": ""
} |
q8075 | TheBigDB.Request.execute | train | def execute
# Here is the order of operations:
# -> setting @data_sent
# -> executing before_request_execution callback
# -> executing the HTTP request
# -> setting @response
# -> setting @data_received
# -> executing after_request_execution callback
# Setting @data_sent
params = Rack::Utils.parse_nested_query(URI.parse(@http_request.path).query)
# Since that's how it will be interpreted anyway on the server, we merge the POST params to the GET params,
# but it's not supposed to happen: either every params is prepared for GET/query params, or as POST body
params.merge!(Rack::Utils.parse_nested_query(@http_request.body.to_s))
# About: Hash[{}.map{|k,v| [k, v.join] }]
# it transforms the following hash:
# {"accept"=>["*/*"], "user-agent"=>["TheBigDB RubyWrapper/X.Y.Z"], "host"=>["computer.host"]}
# into the following hash:
# {"accept"=>"*/*", "user-agent"=>"TheBigDB RubyWrapper/X.Y.Z", "host"=>"computer.host"}
# which is way more useful and cleaner.
@data_sent = {
"headers" => Hash[@http_request.to_hash.map{|k,v| [k, v.join] }],
"host" => @http.address,
"port" => @http.port,
"path" => URI.parse(@http_request.path).path,
"method" => @http_request.method,
"params" => params
}
# Executing callback
TheBigDB.before_request_execution.call(self)
# Here is where the request is actually executed
@http_response = TheBigDB.http_request_executor.call(@http, @http_request)
# Setting @response
begin
# We parse the JSON answer and return it.
@response = JSON(@http_response.body)
rescue JSON::ParserError => e
@response = {"status" => "error", "error" => {"code" => "0000", "description" => "The server gave an invalid JSON body:\n#{@http_response.body}"}}
end
# Setting @data_received
@data_received = {
"headers" => Hash[@http_response.to_hash.map{|k,v| [k, v.join] }],
"content" => @response
}
# Executing callback
TheBigDB.after_request_execution.call(self)
# Raising exception if asked
if TheBigDB.raise_on_api_status_error and @response["status"] == "error"
raise ApiStatusError.new(@response["error"]["code"])
end
self
end | ruby | {
"resource": ""
} |
q8076 | Aker.GroupMemberships.find | train | def find(group, *affiliate_ids)
candidates = self.select { |gm| gm.group.include?(group) }
return candidates if affiliate_ids.empty?
candidates.select { |gm| affiliate_ids.detect { |id| gm.include_affiliate?(id) } }
end | ruby | {
"resource": ""
} |
q8077 | RailsKvsDriver.Session.session | train | def session(driver_config, &block)
driver_config = validate_driver_config!(driver_config)
driver_connection_pool(self, driver_config).with do |kvs_instance|
block.call(self.new(kvs_instance, driver_config))
end
end | ruby | {
"resource": ""
} |
q8078 | RailsKvsDriver.Session.driver_connection_pool | train | def driver_connection_pool(driver_class, driver_config)
pool = search_driver_connection_pool(driver_class, driver_config)
return (pool.nil?) ? set_driver_connection_pool(driver_class, driver_config) : pool
end | ruby | {
"resource": ""
} |
q8079 | RailsKvsDriver.Session.set_driver_connection_pool | train | def set_driver_connection_pool(driver_class, driver_config)
conf = {
size: driver_config[:pool_size],
timeout: driver_config[:timeout_sec]
}
pool = ConnectionPool.new(conf) { driver_class.connect(driver_config) }
RailsKvsDriver::KVS_CONNECTION_POOL[driver_class.name] ||= Array.new
RailsKvsDriver::KVS_CONNECTION_POOL[driver_class.name].push({config: driver_config, pool: pool})
return pool
end | ruby | {
"resource": ""
} |
q8080 | RailsKvsDriver.Session.search_driver_connection_pool | train | def search_driver_connection_pool(driver_class, driver_config)
if RailsKvsDriver::KVS_CONNECTION_POOL.has_key?(driver_class.name)
RailsKvsDriver::KVS_CONNECTION_POOL[driver_class.name].each do |pool_set|
return pool_set[:pool] if equal_driver_config?(pool_set[:config], driver_config)
end
end
return nil
end | ruby | {
"resource": ""
} |
q8081 | RailsKvsDriver.Session.equal_driver_config? | train | def equal_driver_config?(config1, config2)
return false unless config1[:host] == config2[:host]
return false unless config1[:port] == config2[:port]
return false unless config1[:timeout_sec] == config2[:timeout_sec]
return false unless config1[:pool_size] == config2[:pool_size]
return false unless config1[:config_key] == config2[:config_key]
return true
end | ruby | {
"resource": ""
} |
q8082 | Crisp.FunctionRunner.validate_args_count | train | def validate_args_count(expected, got)
if (expected.is_a?(Numeric) and expected != got) or
(expected.is_a?(Range) and !(expected === got))
raise ArgumentError, "wrong number of arguments for '#{name}' (#{got} for #{expected})"
end
end | ruby | {
"resource": ""
} |
q8083 | Irie.ClassMethods.extensions! | train | def extensions!(*extension_syms)
return extension_syms if extension_syms.length == 0
extension_syms = extension_syms.flatten.collect {|es| es.to_sym}.compact
if extension_syms.include?(:all)
ordered_extension_syms = self.extension_include_order.dup
else
extensions_without_defined_order = extension_syms.uniq - self.extension_include_order.uniq
if extensions_without_defined_order.length > 0
raise ::Irie::ConfigurationError.new "The following must be added to the self.extension_include_order array in Irie configuration: #{extensions_without_defined_order.collect(&:inspect).join(', ')}"
else
ordered_extension_syms = self.extension_include_order & extension_syms
end
end
# load requested extensions
ordered_extension_syms.each do |arg_sym|
if module_class_name = self.available_extensions[arg_sym]
begin
::Irie.logger.debug("[Irie] Irie::ClassMethods.extensions! #{self} including #{module_class_name}") if ::Irie.debug?
include module_class_name.constantize
rescue NameError => e
raise ::Irie::ConfigurationError.new "Failed to constantize '#{module_class_name}' with extension key #{arg_sym.inspect} in self.available_extensions. Error: \n#{e.message}\n#{e.backtrace.join("\n")}"
end
else
raise ::Irie::ConfigurationError.new "#{arg_sym.inspect} isn't defined in self.available_extensions"
end
end
extension_syms
end | ruby | {
"resource": ""
} |
q8084 | Humpyard.ElementsController.new | train | def new
@element = Humpyard::config.element_types[params[:type]].new(
:page_id => params[:page_id],
:container_id => params[:container_id].to_i > 0 ? params[:container_id].to_i : nil,
:page_yield_name => params[:yield_name].blank? ? 'main' : params[:yield_name],
:shared_state => 0)
authorize! :create, @element.element
@element_type = params[:type]
@prev = Humpyard::Element.find_by_id(params[:prev_id])
@next = Humpyard::Element.find_by_id(params[:next_id])
render :partial => 'edit'
end | ruby | {
"resource": ""
} |
q8085 | Humpyard.ElementsController.create | train | def create
# Creates a new element of the requested type, positions it between the
# given prev/next elements, and responds with JSON instructions for the
# client-side DOM insertion.
@element = Humpyard::config.element_types[params[:type]].new params[:element]
unless can? :create, @element.element
# Not authorized: 403 with a failure status body.
render :json => {
:status => :failed
}, :status => 403
return
end
if @element.save
@prev = Humpyard::Element.find_by_id(params[:prev_id])
@next = Humpyard::Element.find_by_id(params[:next_id])
do_move(@element, @prev, @next)
# Tell the client where to insert the rendered element: inside its
# container, or in the page's yield region when there is no container.
insert_options = {
:element => "hy-id-#{@element.element.id}",
:url => humpyard_element_path(@element.element),
:parent => @element.container ? "hy-id-#{@element.container.id}" : "hy-content-#{@element.page_yield_name}"
}
# Prefer inserting before @next; otherwise after @prev when present.
insert_options[:before] = "hy-id-#{@next.id}" if @next
insert_options[:after] = "hy-id-#{@prev.id}" if not @next and @prev
render :json => {
:status => :ok,
:dialog => :close,
:insert => [insert_options]
}
else
# Validation failed: report errors so the dialog can display them.
render :json => {
:status => :failed,
:errors => @element.errors
}
end
end | ruby | {
"resource": ""
} |
q8086 | Humpyard.ElementsController.update | train | def update
# Updates an existing element's content attributes and answers with JSON
# telling the client to close the dialog and re-render the element, or
# with validation errors / an error status.
@element = Humpyard::Element.find(params[:id])
if @element
unless can? :update, @element
# Not authorized: 403 with a failure status body.
render :json => {
:status => :failed
}, :status => 403
return
end
# The editable attributes live on the element's content_data record.
if @element.content_data.update_attributes params[:element]
render :json => {
:status => :ok,
:dialog => :close,
# Ask the client to replace the rendered element in place.
:replace => [
{
:element => "hy-id-#{@element.id}",
:url => humpyard_element_path(@element)
}
]
}
else
# Validation failed: report errors so the dialog can display them.
render :json => {
:status => :failed,
:errors => @element.content_data.errors
}
end
else
# NOTE(review): ActiveRecord-style find usually raises on unknown ids,
# which would make this 404 branch unreachable -- verify.
render :json => {
:status => :failed
}, :status => 404
end
end | ruby | {
"resource": ""
} |
q8087 | Humpyard.ElementsController.move | train | def move
# Moves an element to a new container and/or yield region and reorders
# it between the given prev/next elements. Responds with a JSON status.
@element = Humpyard::Element.find(params[:id])
if @element
unless can? :update, @element
# Not authorized: 403 with a failure status body.
render :json => {
:status => :failed
}, :status => 403
return
end
# Re-parent first, then let do_move fix up the ordering.
@element.update_attributes(
:container => Humpyard::Element.find_by_id(params[:container_id]),
:page_yield_name => params[:yield_name]
)
@prev = Humpyard::Element.find_by_id(params[:prev_id])
@next = Humpyard::Element.find_by_id(params[:next_id])
do_move(@element, @prev, @next)
render :json => {
:status => :ok
}
else
# NOTE(review): ActiveRecord-style find usually raises on unknown ids,
# which would make this 404 branch unreachable -- verify.
render :json => {
:status => :failed
}, :status => 404
end
end | ruby | {
"resource": ""
} |
# Sets the API modes for this configuration.
#
# Accepts either a single array of modes or the modes as separate
# arguments; nils are dropped and every mode is stored as a Symbol.
def api_modes=(*new_modes)
  # Unwrap a lone array argument so both calling styles behave the same.
  if new_modes.size == 1 && new_modes.first.is_a?(Array)
    new_modes = new_modes.first
  end
  @api_modes = new_modes.compact.map(&:to_sym)
end
# Loads central configuration parameters from +filename+ and registers
# each group on this configuration via add_parameters_for.
def central(filename)
  ::Aker::CentralParameters.new(filename).each do |group, values|
    add_parameters_for(group, values)
  end
end
# Builds a bitstream for the given ELF file targeted at one of this
# core's supported FPGA targets.
#
# @param elf_file [String] path to the compiled ELF image
# @param target target name; defaults to the core's first target
# @return [Adept::DataFormats::Bitstream] the configured bitstream
def configure(elf_file, target=@targets.keys.first)
  #Ensure the target is a string.
  target = target.to_s
  #Get the path to the bitfile and memory map which will be used to generate the new bitfile.
  memory_map = "#@base_path/#{@targets[target]['memory_map']}"
  bit_file = "#@base_path/#{@targets[target]['bit_file']}"
  # (Removed leftover debug output: `p target, bit_file` printed to stdout
  # on every call.)
  #Generate the new raw bitfile: ELF -> Intel hex -> byte-swapped mem -> bitstream.
  hex = with_temporary_files { |dest, _| system("avr-objcopy -O ihex -R .eeprom -R .fuse -R .lock #{elf_file} #{dest}") }
  mem = with_temporary_files(hex, '.mem', '.hex') { |dest, source| system("srec_cat #{source} -Intel -Byte_Swap 2 -Data_Only -Line_Length 100000000 -o #{dest} -vmem 8") }
  bit = with_temporary_files(mem, '.bit', '.mem') { |dest, source| system("data2mem -bm #{memory_map} -bt #{bit_file} -bd #{source} -o b #{dest}") }
  #... wrap it in a Bitstream object, and return it.
  Adept::DataFormats::Bitstream.from_string(bit)
end
# Runs the given block with a pair of temporary files: a source file
# pre-filled with +file_contents+ and an empty destination file. The
# block receives [destination_path, source_path] and must return a
# truthy value; otherwise a CommandFailedError carrying +message+ is
# raised. Returns the destination file's contents.
def with_temporary_files(file_contents='', dest_extension = '', source_extension = '', message=nil)
  # Create files read/write, but do not lock them for exclusive access.
  open_mode = File::CREAT | File::RDWR
  Tempfile.open(['core_prev', source_extension], :mode => open_mode) do |input|
    # Seed the source file so an external tool can read the payload.
    input.write(file_contents)
    input.flush
    Tempfile.open(['core_next', dest_extension], :mode => open_mode) do |output|
      success = yield [output.path, input.path]
      raise CommandFailedError, message unless success
      # Hand back whatever the block produced in the destination file.
      return File.read(output)
    end
  end
end
q8092 | Cheers.Color.to_s | train | def to_s
return '#' + r.to_s(16).rjust(2, '0') +
g.to_s(16).rjust(2, '0') +
b.to_s(16).rjust(2, '0')
end | ruby | {
"resource": ""
} |
# Issues an authenticated API request.
#
# Merges the client's default headers with a Bearer authorization header
# built from the current access token, then delegates to the underlying
# HTTP client and returns its result.
def request(method, path, params={})
  auth_headers = @default_headers.merge('Authorization' => "Bearer #{@access_token}")
  http_client.send_request(method, path, { :params => params, :headers => auth_headers })
end
# Returns every server in the deployment whose nickname marks it as an
# app server. Raises when the deployment contains none.
def app_servers
  matches = @servers.select { |server| server.nickname =~ /App Server/ }
  raise "No app servers in deployment" if matches.empty?
  matches
end
# Total duration in milliseconds, or nil when the inspected media is not
# valid. Parses the colon-separated "HH:MM:SS.ss" string from raw_duration.
def duration
  return nil unless valid?
  hours, minutes, seconds = raw_duration.split(":")
  hours.to_i * 3_600_000 + minutes.to_i * 60_000 + (seconds.to_f * 1000).to_i
end
# Whether the domain appears deliverable: true when it has MX records
# whose hosts resolve to A records, or plain A records, within
# SERVER_TIMEOUT seconds.
#
# @return [Boolean] false when @domain is unset, no records are found,
#   the lookup times out, or the DNS server refuses the connection.
def valid?
  return false unless @domain
  Timeout.timeout(SERVER_TIMEOUT) do
    return true if valid_mx_records?
    return true if a_records?
  end
  # Previously fell through returning nil; a predicate should be boolean.
  false
rescue Timeout::Error, Errno::ECONNREFUSED
  false
end
# True when at least one of the domain's MX hosts resolves to an A record.
def valid_mx_records?
  mx_servers.any? do |server|
    dns.getresources(server[:address], Resolv::DNS::Resource::IN::A).any?
  end
end
# MX hosts for the domain as [{preference:, address:}, ...], memoized
# after the first lookup (an empty result is cached too).
def mx_servers
  @mx_servers ||= mx_records.map do |record|
    { preference: record.preference, address: record.exchange.to_s }
  end
end
# Resolves a mixed list of bit positions and field names into a flat
# array of bit indices. Integers pass through untouched; names are
# looked up in @field_list (unknown names are silently skipped).
def bits_for(*field_names)
  collected = field_names.flatten.each_with_object([]) do |entry, acc|
    if entry.is_a?(Integer)
      acc << entry
    elsif entry.respond_to?(:to_sym) && @field_list[entry.to_sym]
      acc << @field_list[entry.to_sym]
    end
  end
  collected.flatten
end
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.