query stringlengths 7 9.55k | document stringlengths 10 363k | metadata dict | negatives listlengths 0 101 | negative_scores listlengths 0 101 | document_score stringlengths 3 10 | document_rank stringclasses 102
values |
|---|---|---|---|---|---|---|
The 'human name' of the model, if different from the actual model name. | def post_type; model_name; end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def human_name\n cn = name\n\n if respond_to?(:is_dynamic_model) && is_dynamic_model || respond_to?(:is_activity_log) && is_activity_log\n cn = cn.split('::').last\n end\n\n cn.underscore.humanize.captionize\n end",
"def human_model_name\n model_name.humanize\n end",
"def huma... | [
"0.8473744",
"0.83515793",
"0.83515793",
"0.83297586",
"0.82164854",
"0.8119618",
"0.793476",
"0.78004855",
"0.77432376",
"0.77353776",
"0.76857543",
"0.76778156",
"0.7649287",
"0.7620736",
"0.7612795",
"0.758491",
"0.75530016",
"0.7548967",
"0.75357574",
"0.753542",
"0.75354... | 0.0 | -1 |
The model class associated with the controller. | def model; eval model_name; end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def model_class\n @model_class ||= controller_path.classify.constantize\n end",
"def model_class\n @controller.model_class\n end",
"def model\n controller_name.classify.constantize\n end",
"def model_class\n return @model_class || self.class.model_class\n end",... | [
"0.8602606",
"0.8306242",
"0.8306108",
"0.8305672",
"0.82830715",
"0.8181403",
"0.8161984",
"0.8133908",
"0.7987708",
"0.7917064",
"0.7890843",
"0.7889863",
"0.7848757",
"0.7848757",
"0.77857494",
"0.7785643",
"0.77540207",
"0.77540207",
"0.774854",
"0.76986045",
"0.76372683"... | 0.0 | -1 |
Before filter to bail unless the user has permission to edit the post. | def check_permissions
unless can_edit? @post
flash.now[:warning] = "You can't edit that post."
redirect_to :action => 'show'
return false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def authorized_to_edit?(post)\n post.user == current_user\n end",
"def authorized_to_edit?(post)\n post.user == current_user\n end",
"def check_permissions\n unless can_edit? @post\n flash[:warning] = \"You can't edit that post.\"\n redirect_to :action => 'show'\n retu... | [
"0.7753937",
"0.7753937",
"0.7571513",
"0.74274355",
"0.7154308",
"0.7068018",
"0.70675576",
"0.7046281",
"0.69860834",
"0.69762564",
"0.6947586",
"0.6856135",
"0.68221307",
"0.6757768",
"0.67445046",
"0.6735497",
"0.6727419",
"0.67264026",
"0.6703153",
"0.6648879",
"0.664291... | 0.75836104 | 2 |
For example, [1,2,3] have the following permutations: [ [1,2,3], [1,3,2], [2,1,3], [2,3,1], [3,1,2], [3,2,1] ] Mental Model: Build the solution as you go but only return when the solution reaches a size equal to the size of the input array Since the solution must have distinct values, you can use a deadend condition to skip values already in the current solution and back track When you reach a solution thats equal to the size of the input array (youll also be at a leaf node), you add it to the results, and then pop back to the last recursive call | def permute(nums, solution = [], results = []
return results << solution.clone if solution.size == nums.size
nums.each do |num|
next if solution.include?(num)
solution << num
permute(nums, solution, results)
solution.pop
end
results
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def permutations(array)\n return [array] if array.length <= 1\n \n \n # Similar to the subsets problem, we observe that to get the permutations\n # of [1, 2, 3] we can look at the permutations of [1, 2] which are\n # [1, 2] and [2, 1] and add the last element to every possible index getting\n ... | [
"0.7914453",
"0.78916126",
"0.787635",
"0.7588981",
"0.7549795",
"0.745429",
"0.7327684",
"0.7281294",
"0.7268896",
"0.7238501",
"0.71684736",
"0.71532094",
"0.7134002",
"0.71337885",
"0.7091431",
"0.708485",
"0.706667",
"0.6993153",
"0.6992714",
"0.69472957",
"0.6945972",
... | 0.70612013 | 17 |
Replace this with your real tests. | def test_truth
assert true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def testing\n # ...\n end",
"def __dummy_test__\n end",
"def tests; end",
"def tests; end",
"def spec; end",
"def spec; end",
"def self_test; end",
"def self_test; end",
"def test \n end",
"def test_0_dummy\n\t\tend",
"def test\n\n end",
"def test\n end",
"def test\n end"... | [
"0.7444841",
"0.6954491",
"0.6913394",
"0.6913394",
"0.6863823",
"0.6863823",
"0.66389537",
"0.66389537",
"0.66238844",
"0.6545616",
"0.6523148",
"0.64830077",
"0.64830077",
"0.64830077",
"0.6406177",
"0.6389718",
"0.6389718",
"0.6389718",
"0.6389718",
"0.6389718",
"0.6389718... | 0.0 | -1 |
Show invalid properties with the reasons. Usually used together with valid? | def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.... | [
"0.7648941",
"0.7648941",
"0.7648941",
"0.7648941",
"0.7636562",
"0.7636562",
"0.7636562",
"0.7636562",
"0.7636562",
"0.7636562",
"0.7636562",
"0.7636562",
"0.73556465",
"0.7333955",
"0.72676647",
"0.72381455",
"0.72310346",
"0.72247696",
"0.72072345",
"0.7175248"
] | 0.7169149 | 91 |
Check to see if the all the properties in the model are valid | def valid?
true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def validate_properties\n true\n end",
"def validate_properties\n true\n end",
"def validate\n super\n\n check_optional_property :collection, String\n check_optional_property :create, String\n check_optional_property :delete, String\n check_optional_property :... | [
"0.78992486",
"0.78992486",
"0.70971805",
"0.70782334",
"0.7032205",
"0.7031276",
"0.69510347",
"0.6869891",
"0.6858077",
"0.6858077",
"0.68287027",
"0.6823878",
"0.6820306",
"0.68144894",
"0.6794656",
"0.6752167",
"0.66843414",
"0.6676546",
"0.6667755",
"0.66296124",
"0.6618... | 0.0 | -1 |
Checks equality by comparing each attribute. | def ==(o)
return true if self.equal?(o)
self.class == o.class &&
is_lockout_policy_enabled == o.is_lockout_policy_enabled &&
login_attempt_threshold == o.login_attempt_threshold &&
login_attempt_reset_interval_in_minutes == o.login_attempt_reset_interval_in_minutes &&
lockout_interval_in_minutes == o.lockout_interval_in_minutes &&
disable_lockout_for_sa == o.disable_lockout_for_sa &&
is_expiration_policy_enabled == o.is_expiration_policy_enabled &&
expiration_days == o.expiration_days &&
is_expiration_reminders_enabled == o.is_expiration_reminders_enabled &&
expiration_first_reminder_days == o.expiration_first_reminder_days &&
expiration_reminder_days == o.expiration_reminder_days &&
is_minimum_length_required == o.is_minimum_length_required &&
minimum_length == o.minimum_length &&
is_numeric_characters_required == o.is_numeric_characters_required &&
is_special_characters_required == o.is_special_characters_required &&
is_upper_and_lower_case_required == o.is_upper_and_lower_case_required &&
can_be_same_as_user_name == o.can_be_same_as_user_name &&
is_history_policy_enabled == o.is_history_policy_enabled &&
history_password_count == o.history_password_count &&
minimum_password_age == o.minimum_password_age &&
is_prohibited_password_policy_enabled == o.is_prohibited_password_policy_enabled &&
is_inactivity_timeout_policy_enabled == o.is_inactivity_timeout_policy_enabled &&
inactivity_timeout_in_minutes == o.inactivity_timeout_in_minutes
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ==(other)\n attributes == other.attributes\n end",
"def ==(other) # :nodoc:\n @attrs == other.attrs\n end",
"def eql?(other)\n return true if self == other\n @@ATTRIBUTES.each do |att|\n return false unless self.send(att).eql?(other.send(att))\n end\n true\n en... | [
"0.7291717",
"0.7188103",
"0.70395297",
"0.7007927",
"0.68874705",
"0.6861532",
"0.6707156",
"0.6660597",
"0.66147524",
"0.658478",
"0.6584619",
"0.6580019",
"0.65543133",
"0.6543933",
"0.65068495",
"0.6479513",
"0.6456241",
"0.6415999",
"0.6412208",
"0.6412208",
"0.6412208",... | 0.0 | -1 |
Calculates hash code according to all attributes. | def hash
[is_lockout_policy_enabled, login_attempt_threshold, login_attempt_reset_interval_in_minutes, lockout_interval_in_minutes, disable_lockout_for_sa, is_expiration_policy_enabled, expiration_days, is_expiration_reminders_enabled, expiration_first_reminder_days, expiration_reminder_days, is_minimum_length_required, minimum_length, is_numeric_characters_required, is_special_characters_required, is_upper_and_lower_case_required, can_be_same_as_user_name, is_history_policy_enabled, history_password_count, minimum_password_age, is_prohibited_password_policy_enabled, is_inactivity_timeout_policy_enabled, inactivity_timeout_in_minutes].hash
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def attr_hash\n Digest::MD5.hexdigest(\"#{@name}:#{@ruby_type}\")\n end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash\n code = 17\n code = 37*code + @x.hash\n code = 37*code + @y.hash\n ... | [
"0.71201754",
"0.70382947",
"0.70382947",
"0.70382947",
"0.70382947",
"0.70382947",
"0.70382947",
"0.70382947",
"0.68936527",
"0.67828596",
"0.67061776",
"0.66981995",
"0.66876984",
"0.6668829",
"0.6487257",
"0.64606065",
"0.64606065",
"0.64434665",
"0.64101994",
"0.639494",
... | 0.0 | -1 |
Builds the object from hash | def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build(hash)\n obj = new\n hash.each_pair do |k,v|\n obj[k] = v if variables[k]\n end\n return obj\n end",
"def build_from_hash(attributes)\n\n end",
"def build_from_hash(hash)\n instance = self.new\n\n # Add the instance attributes dynamically ... | [
"0.8011074",
"0.7470833",
"0.7457607",
"0.7256629",
"0.72455454",
"0.70060325",
"0.6973257",
"0.6955014",
"0.69459796",
"0.69398683",
"0.69363195",
"0.6917627",
"0.6872358",
"0.6796184",
"0.6783521",
"0.67575246",
"0.67575246",
"0.67560464",
"0.67514306",
"0.67136854",
"0.666... | 0.0 | -1 |
Deserializes the data based on type | def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = SwaggerClient.const_get(type).new
temp_model.build_from_hash(value)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s... | [
"0.7330926",
"0.7274019",
"0.72504056",
"0.7245751",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
... | 0.0 | -1 |
Returns the string representation of the object | def to_s
to_hash.to_s
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_s\n @object.to_s\n end",
"def to_s\n object.to_s\n end",
"def serialize(object)\n object.to_s\n end",
"def to_s\n self.inspect\n end",
"def to_s\n @string || @object.to_s('F')\n end",
"def to_s\n @string || @object.to_s('F')\n end",
"de... | [
"0.901024",
"0.89506465",
"0.84703195",
"0.83409667",
"0.8337169",
"0.8337169",
"0.8332247",
"0.82546586",
"0.8145818",
"0.8144667",
"0.81357557",
"0.812714",
"0.8093436",
"0.8086725",
"0.8073356",
"0.8039774",
"0.80308646",
"0.80064154",
"0.80064154",
"0.80064154",
"0.800641... | 0.0 | -1 |
to_body is an alias to to_hash (backward compatibility) | def to_body
to_hash
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_body\r\n to_hash\r\n end",
"def to_body\n to_hash\nend",
"def to_body\n to_hash\nend"
] | [
"0.84292203",
"0.8345769",
"0.8345769"
] | 0.0 | -1 |
Returns the object in the form of hash | def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_hash\n object\n end",
"def hash\r\n return to_s.hash\r\n end",
"def hash\n to_a.hash\n end",
"def hash\n [_hash, name, owner].hash\n end",
"def hash\n return to_s.hash\n end",
"def hash\n @hash\n end",
"def hash\n @hash.hash\n end",
"def hash\n ... | [
"0.8270299",
"0.78767854",
"0.78726953",
"0.7802364",
"0.7789188",
"0.77806795",
"0.7775915",
"0.7767511",
"0.7760525",
"0.7760525",
"0.77559966",
"0.7731286",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",... | 0.0 | -1 |
Outputs nonarray value in the form of hash For object, use to_hash. Otherwise, just return the value | def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hash\n [value].hash\n end",
"def hash\n [value].hash\n end",
"def hash\n\t\tvalue.hash\n\tend",
"def hash\n value.hash\n end",
"def hash\n @value.hash\n end",
"def hash\r\n return to_s.hash\r\n end",
"def to_hash\n @value\n end",
"def to_hash\n @va... | [
"0.6719518",
"0.6719518",
"0.666832",
"0.66565555",
"0.6586841",
"0.6452931",
"0.6414911",
"0.6414911",
"0.6382046",
"0.6346188",
"0.6302933",
"0.62237245",
"0.6151989",
"0.6101756",
"0.60795677",
"0.60795677",
"0.60717124",
"0.6035991",
"0.6021168",
"0.5936472",
"0.5903488",... | 0.0 | -1 |
July15 used by _new_trans..form | def ledger_options
options = ''
current_user.ledgers.each do |ledger|
options += content_tag(:option, ledger.user_tag, value: ledger.id)
end
options.html_safe
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def apply_locale; end",
"def translations; end",
"def translated_month_names; end",
"def translate_date\n (self.origin_date).strftime(\"%b %Y\")\n end",
"def originalsourceform; end",
"def init_translations; end",
"def label_translation; end",
"def transact; end",
"def transformations; end",
... | [
"0.6187041",
"0.61536145",
"0.59390056",
"0.5856631",
"0.583345",
"0.57535934",
"0.5673085",
"0.56613725",
"0.5606845",
"0.55839807",
"0.5561636",
"0.5561636",
"0.5561636",
"0.5561636",
"0.5561636",
"0.5561636",
"0.5561636",
"0.5506269",
"0.55037665",
"0.54669863",
"0.5457788... | 0.0 | -1 |
def between_june_to_august? june_to_august.member?(current_month) end | def range
beginning_of_period..(self.next_year).beginning_of_period
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def month? = unit == 'month'",
"def could_be_month?(month); end",
"def is_current_month(date)\n currentMonth = Time.now.month\n dateMonth = date.month\n dateMonth.between?(currentMonth, currentMonth)\n end",
"def month_person? = unit == 'month-person'",
"def current_month?\n self.mon... | [
"0.7912627",
"0.7596977",
"0.75474304",
"0.7402696",
"0.72715855",
"0.7092502",
"0.699987",
"0.68860364",
"0.6834357",
"0.6828211",
"0.680119",
"0.67861795",
"0.6726545",
"0.6692705",
"0.665147",
"0.66347694",
"0.6631216",
"0.66232073",
"0.66008395",
"0.6553777",
"0.6522336",... | 0.0 | -1 |
snap distance in world units attractive snapping: move the entity to the guide | def snap(entity, guide)
p = entity[:physics].body.p.clone
x = guide[:physics].body.p.clone
# TODO: snapping should probably be based on screen-space units, rather than world space
if p.dist x < THRESHOLD
entity[:physics].body.p = x
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def snap()\n \n end",
"def snap_offset\n @y += @off_y / 32\n @x += @off_x / 32\n @real_x = @x * 128\n @real_y = @y * 128\n @off_y = 0#@off_y % 32\n @off_x = 0#@off_x % 32\n end",
"def move_dist(deltx, delty)\n @x += deltx\n @y += delty\n normalize\n end",
"def m... | [
"0.62069064",
"0.61743164",
"0.5829714",
"0.5810272",
"0.5738337",
"0.5672736",
"0.5659968",
"0.5602787",
"0.5532215",
"0.5467983",
"0.5419396",
"0.54163206",
"0.5371832",
"0.53711545",
"0.5367201",
"0.53665096",
"0.5362233",
"0.5360298",
"0.5356642",
"0.53473645",
"0.5334186... | 0.8023953 | 0 |
Groups, sorts and builds list components for statutory_instruments | def sort_and_build_components(statutory_instruments: nil, small: false)
grouping_block = proc { |statutory_instrument| LayingDateHelper.get_date(statutory_instrument) }
sorted_statutory_instruments = GroupSortHelper.group_and_sort(statutory_instruments, group_block: grouping_block, key_sort_descending: true, sort_method_symbols: %i[laidThingName])
build_components(statutory_instruments: sorted_statutory_instruments, small: small)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def collect_group_details\n cmd = 'lsgroup -a ALL' # get all group names\n result ||= inspec.backend.run_command(cmd)\n return [] if result.exit_status.to_i != 0\n names = result.stdout.split(\"\\n\")\n groups_cache = []\n names.sort.uniq.each do |n|\n groups_cache << AixGroup(inspec, n)\n ... | [
"0.5581342",
"0.54537064",
"0.54271287",
"0.54271287",
"0.54271287",
"0.5369767",
"0.5311323",
"0.5185333",
"0.5180616",
"0.5170448",
"0.50878435",
"0.5070816",
"0.50560904",
"0.4989491",
"0.49635905",
"0.49584764",
"0.49576688",
"0.4918784",
"0.49024788",
"0.48813635",
"0.48... | 0.64436966 | 0 |
send a signup email to the user, pass in the user object that contains the user's email address | def notify_email_guest(reservation)
@reservation = reservation
mail( :to => @reservation.guest.email,
:subject => 'Your reservations are done!' )
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def signup_email(user)\n @user = user\n mail(to: @user.email, subject: 'Thanks for signing up for our amazing app')\n end",
"def send_signup_email(user)\n @user = user\n mail( to: @user.email,\n subject: \"Thanks for signing up, #{@user.email}!\")\n end",
"def send_signup_email(user)\n ... | [
"0.85455173",
"0.85141313",
"0.8485442",
"0.84805363",
"0.8466388",
"0.8461117",
"0.8433878",
"0.83607185",
"0.8358826",
"0.8347456",
"0.8341746",
"0.8341746",
"0.83316386",
"0.83080643",
"0.8303365",
"0.8301317",
"0.82877415",
"0.82778716",
"0.8244165",
"0.8226288",
"0.82172... | 0.0 | -1 |
RunLength encoding is a simple form of compression that detects 'runs' of repeated instances of a symbol in a string and compresses them to a list of pairs of 'symbol' 'length'. For example, the string "Heeeeelllllooooo nurse!" Could be compressed using runlength encoding to the list of pairs [(1,'H'),(5,'e'),(5,'l'),(5,'o'),(1,'n'),(1,'u'),(1,'r'),(1,'s'),(1,'e')] Which seems to not be compressed, but if you represent it as an array of 18bytes (each pair is 2 bytes), then we save 5 bytes of space compressing this string. Write a function that takes in a string and returns a runlengthencoding of that string. (either as a list of pairs or as a 2byteper pair array) BONUS: Write a decompression function that takes in the RLE representation and returns the original string | def run_encode(text)
text.scan(/(.)(\1*)/).reduce([]) {|arr,match| arr << [ match[1].length + 1, match[0] ]}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_modified_run_length_encoded\n self.inject([]) do |array, current|\n if current.kind_of? Array\n array << [current[-1]] * current[0]\n else\n array << current\n end\n array\n end.flatten\n end",
"def modified_run_length_encode\n self.run_length_en... | [
"0.6804688",
"0.65113276",
"0.64338154",
"0.6414497",
"0.6406939",
"0.6387554",
"0.6349",
"0.63435316",
"0.6020335",
"0.57893974",
"0.5707534",
"0.567309",
"0.56607735",
"0.55903673",
"0.5580258",
"0.5570319",
"0.55518955",
"0.55312335",
"0.54988575",
"0.5496349",
"0.54719454... | 0.5999452 | 9 |
GET /admin/blogs GET /admin/blogs.json | def index
@admin_blogs = Admin::Blog.all
respond_to do |format|
format.html # index.html.erb
format.json { render json: @admin_blogs }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def blogs\n @blogs = Blog.all\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @administration }\n end\n end",
"def index\n @blogs = Blog.all\n render json: @blogs\n end",
"def index\n @blogs = Blog.all\n\n render json: @blogs\n e... | [
"0.8222594",
"0.78353745",
"0.7757086",
"0.76240563",
"0.76240563",
"0.7559082",
"0.74420637",
"0.7342531",
"0.7337836",
"0.73304206",
"0.724447",
"0.7186203",
"0.7176107",
"0.7176107",
"0.71370107",
"0.7033475",
"0.7014517",
"0.70048475",
"0.6978675",
"0.6944052",
"0.6933904... | 0.80760044 | 1 |
GET /admin/blogs/1 GET /admin/blogs/1.json | def show
@admin_blog = Admin::Blog.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @admin_blog }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def blogs\n @blogs = Blog.all\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @administration }\n end\n end",
"def index\n @admin_blogs = Admin::Blog.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json... | [
"0.7980677",
"0.7947852",
"0.76943374",
"0.7645318",
"0.75283265",
"0.75283265",
"0.74242324",
"0.728616",
"0.72193074",
"0.71178734",
"0.7059829",
"0.7059829",
"0.70441556",
"0.7034725",
"0.7021378",
"0.6977012",
"0.6977012",
"0.6937085",
"0.69315624",
"0.6927167",
"0.692716... | 0.7406272 | 7 |
GET /admin/blogs/new GET /admin/blogs/new.json | def new
@admin_blog = Admin::Blog.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @admin_blog }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @blog = Blog.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @blog }\n end\n end",
"def new\n @blog = Blog.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @blog }\n end\n end",
"d... | [
"0.7632543",
"0.7632543",
"0.7593862",
"0.75112",
"0.75112",
"0.74660206",
"0.7431854",
"0.7431854",
"0.7429871",
"0.7362031",
"0.73323536",
"0.72947013",
"0.7245933",
"0.70686996",
"0.6895114",
"0.68485725",
"0.67523134",
"0.6745598",
"0.6729959",
"0.6729959",
"0.6681173",
... | 0.77257085 | 0 |
POST /admin/blogs POST /admin/blogs.json | def create
@admin_blog = Admin::Blog.new(params[:admin_blog])
respond_to do |format|
if @admin_blog.save
format.html { redirect_to admin_blogs_path, notice: I18n.t("flash.create_admin_blog") }
format.json { render json: @admin_blog, status: :created, location: @admin_blog }
else
format.html { render action: "new" }
format.json { render json: @admin_blog.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @blog = current_user.blogs.new(blog_params)\n\n respond_to do |format|\n if @blog.save\n format.html { redirect_to users_blog_index_path, notice: 'Blog was successfully created.' }\n format.json { render :show, status: :created, location: @blog }\n else\n format.ht... | [
"0.73144656",
"0.72868675",
"0.72489566",
"0.72483975",
"0.7239018",
"0.7230286",
"0.7164843",
"0.70592785",
"0.70252514",
"0.7009184",
"0.6995042",
"0.69944805",
"0.6906439",
"0.6880417",
"0.68629664",
"0.6858992",
"0.685451",
"0.6834721",
"0.68062526",
"0.6741969",
"0.66807... | 0.7352193 | 0 |
PUT /admin/blogs/1 PUT /admin/blogs/1.json | def update
@admin_blog = Admin::Blog.find(params[:id])
respond_to do |format|
if @admin_blog.update_attributes(params[:admin_blog])
format.html { redirect_to admin_blogs_path, notice: I18n.t("flash.update_admin_blog") }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @admin_blog.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n if @blog.update(blog_params)\n render json: @blog\n else\n render json: @blog.errors, status: :unprocessable_entity\n end\n end",
"def update\n @blog = Blog.find(params[:id])\n\n if @blog.update_attributes(params[:blog])\n redirect_to @blog, notice: 'Blog was successfu... | [
"0.69285333",
"0.6906007",
"0.68593264",
"0.68558943",
"0.6801227",
"0.6630947",
"0.65916026",
"0.65694755",
"0.654939",
"0.65028346",
"0.6496769",
"0.64828736",
"0.64619404",
"0.6423786",
"0.6402542",
"0.6383671",
"0.6383671",
"0.6383671",
"0.63836086",
"0.63828456",
"0.6365... | 0.6902838 | 2 |
DELETE /admin/blogs/1 DELETE /admin/blogs/1.json | def destroy
@admin_blog = Admin::Blog.find(params[:id])
respond_to do |format|
if @admin_blog.update_attributes(deleted_at: Time.now())
format.html { redirect_to admin_blogs_path, notice: I18n.t("flash.destroy_admin_blog") }
format.json { head :no_content }
else
format.html { render action: "destroy" }
format.json { render json: @admin_blog.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @myblog = Myblog.find(params[:id])\n @myblog.destroy\n\n respond_to do |format|\n format.html { redirect_to myblogs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @myblog = Myblog.find(params[:id])\n @myblog.destroy\n\n respond_to do |format|\n... | [
"0.7475145",
"0.7475145",
"0.7454765",
"0.7454683",
"0.7427059",
"0.74152935",
"0.73576784",
"0.7348391",
"0.73459685",
"0.73426294",
"0.73426294",
"0.733875",
"0.72838426",
"0.720124",
"0.7198334",
"0.71905315",
"0.7189891",
"0.71883607",
"0.7167395",
"0.7165593",
"0.7127651... | 0.7235769 | 13 |
Returns a URL to either the processed image or a service URL to a service that will process the image dynamicall. | def variant_url
if Rails.application.fastly_enabled?
FastlyLocation.new(attachment).url
else
attachment.processed.url
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def call(params = {})\n return variant.image.url(params) unless processible_image?\n\n \"/#{uid}\"\n end",
"def image_url\n is_processed ?\n FileSystem.url(image_path) : \n orig_image_url\n end",
"def image_url\n is_processed ?\n FileSystem.url(image_path) : \n ori... | [
"0.69252014",
"0.68056226",
"0.68056226",
"0.6635507",
"0.6555533",
"0.6410351",
"0.62772804",
"0.6168011",
"0.6101637",
"0.60781056",
"0.6071444",
"0.6057192",
"0.6040577",
"0.6038763",
"0.60360444",
"0.60348934",
"0.5982203",
"0.5931339",
"0.59076697",
"0.58875084",
"0.5880... | 0.0 | -1 |
Returns a URL to the unaltered original uploaded files. | def original_url
if Rails.application.cloudfront_enabled?
CloudFrontLocation.new(attachment.key, signed: signed?).url
elsif Rails.application.remote_storage?
s3_url
else
attachment.url
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def upload_file_url\n \"file://#{upload_full_path}\"\n end",
"def file_link\n return \"#{SITE_URL}system/ufiles/#{id}/original/#{ufile_file_name}\"\n end",
"def original\n \tobject.file.url(:large)\n end",
"def original_fullpath; end",
"def original_url; end",
"def original_url\n url\n ... | [
"0.71573555",
"0.6890575",
"0.68181145",
"0.680618",
"0.6789811",
"0.6775169",
"0.6758451",
"0.6747347",
"0.6712269",
"0.6599952",
"0.6483799",
"0.6461996",
"0.6409392",
"0.63228154",
"0.62897825",
"0.6263866",
"0.62430584",
"0.62425137",
"0.62425137",
"0.61765206",
"0.615838... | 0.6884803 | 2 |
Override and call super as necessary. | def on_start_listener(event)
start_listener
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run\n super\n end",
"def run\n super\n end",
"def overrides; end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def initialize\n super\n ... | [
"0.73149353",
"0.73149353",
"0.7118824",
"0.7116712",
"0.7116712",
"0.7116712",
"0.7116712",
"0.71129674",
"0.71129674",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70396894",
"0.7017464",
"0.70148975",
"0.6980456",
"0.69575113",
"0.... | 0.0 | -1 |
Override and call super as necessary. | def on_stop_listener(event)
stop_listener
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run\n super\n end",
"def run\n super\n end",
"def overrides; end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def initialize\n super\n ... | [
"0.73149353",
"0.73149353",
"0.7118824",
"0.7116712",
"0.7116712",
"0.7116712",
"0.7116712",
"0.71129674",
"0.71129674",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70396894",
"0.7017464",
"0.70148975",
"0.6980456",
"0.69575113",
"0.... | 0.0 | -1 |
# Event hook: fired after the listener has started. Stores the event
# payload as the server signature — presumably a handle used to tell
# whether a listener is running (see #shutdown_handler) — TODO confirm.
# Override and call super as necessary.
def on_listener_started(event)
  @server_sig = event.data
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run\n super\n end",
"def run\n super\n end",
"def overrides; end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def initialize\n super\n ... | [
"0.73149353",
"0.73149353",
"0.7118824",
"0.7116712",
"0.7116712",
"0.7116712",
"0.7116712",
"0.71129674",
"0.71129674",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70396894",
"0.7017464",
"0.70148975",
"0.6980456",
"0.69575113",
"0.... | 0.0 | -1 |
# Event hook: fired after the listener has stopped. Clears the stored
# server signature so shutdown logic knows nothing is running.
# Override and call super as necessary.
def on_listener_stopped(event)
  @server_sig = nil
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run\n super\n end",
"def run\n super\n end",
"def overrides; end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def initialize\n super\n ... | [
"0.7313576",
"0.7313576",
"0.7119513",
"0.71156645",
"0.71156645",
"0.71156645",
"0.71156645",
"0.7112517",
"0.7112517",
"0.70587885",
"0.70587885",
"0.70587885",
"0.70587885",
"0.70587885",
"0.70587885",
"0.7039202",
"0.70173126",
"0.7014351",
"0.6980056",
"0.6957026",
"0.69... | 0.0 | -1 |
# Shutdown hook: runs the superclass shutdown first, then stops the
# listener if one is still active (@server_sig is set while a listener
# runs — see #on_listener_started/#on_listener_stopped).
# Override and call super as necessary.
def shutdown_handler(event)
  super
  stop_listener if @server_sig
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run\n super\n end",
"def run\n super\n end",
"def overrides; end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def perform\n super\n end",
"def initialize\n super\n ... | [
"0.73149353",
"0.73149353",
"0.7118824",
"0.7116712",
"0.7116712",
"0.7116712",
"0.7116712",
"0.71129674",
"0.71129674",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70600474",
"0.70396894",
"0.7017464",
"0.70148975",
"0.6980456",
"0.69575113",
"0.... | 0.0 | -1 |
# Lazily creates and memoizes a single Mutex for this instance, so every
# caller synchronizes on the same lock object.
def semaphore
  @semaphore = Mutex.new unless @semaphore
  @semaphore
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clean_comments(file)\n @in_file = file.readlines\n @in_file.delete_if{|index| index.match(/^[\\/]{2}/) }\n end",
"def read_file_omitting_comments(input)\n return '' unless File.exist?(input)\n\n File.readlines(input).reject { |line| line =~ /^\\s*#/ }.join('')\n end",
"def parse_w... | [
"0.6469825",
"0.63979584",
"0.62319463",
"0.6184842",
"0.6161725",
"0.61128867",
"0.61128867",
"0.61128867",
"0.60738784",
"0.6030385",
"0.60249114",
"0.5961073",
"0.5959662",
"0.5942848",
"0.59363806",
"0.5923732",
"0.5845309",
"0.5798259",
"0.56033325",
"0.5545736",
"0.5537... | 0.0 | -1 |
# Returns the closed pull requests of +project+ (one row per PR), joined
# with their open/close timestamps, merge timestamp (if any) and the
# lifetime / time-to-merge in minutes, ordered newest first.
# When +github_id+ is given (anything other than the -1 sentinel) the
# result is restricted to that single pull request.
def pull_reqs(project, github_id = -1)
  q = <<-QUERY
  select u.login as login, p.name as project_name, pr.id, pr.pullreq_id as github_id,
         a.created_at as created_at, b.created_at as closed_at,
         (select created_at
          from pull_request_history prh1
          where prh1.pull_request_id = pr.id
          and prh1.action='merged' limit 1) as merged_at,
         timestampdiff(minute, a.created_at, b.created_at) as lifetime_minutes,
         timestampdiff(minute, a.created_at, (select created_at
          from pull_request_history prh1
          where prh1.pull_request_id = pr.id and prh1.action='merged' limit 1)
         ) as mergetime_minutes
  from pull_requests pr, projects p, users u,
       pull_request_history a, pull_request_history b
  where p.id = pr.base_repo_id
  and a.pull_request_id = pr.id
  and a.pull_request_id = b.pull_request_id
  and a.action='opened' and b.action='closed'
  and a.created_at < b.created_at
  and p.owner_id = u.id
  and p.id = ?
  QUERY
  if github_id != -1
    # Coerce to Integer before interpolating into the SQL string:
    # pullreq_id is numeric, and this closes an injection hole for
    # caller-supplied values (the original interpolated github_id raw).
    q += " and pr.pullreq_id = #{github_id.to_i} "
  end
  q += 'group by pr.id order by pr.pullreq_id desc;'
  db.fetch(q, project[:id]).all
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pull_requests repo\n name = full_name repo\n \n %w[open closed].reduce([]) do |memo, state|\n memo | octokit.pulls(name, state, :per_page=>100)\n end\n end",
"def get_pull_requests(repo_full_name)\n @client.pull_requests(repo_full_name, state: 'open')\n end",
"def pull_request... | [
"0.79412997",
"0.779502",
"0.779346",
"0.7744453",
"0.7699714",
"0.769826",
"0.759849",
"0.7460059",
"0.73730844",
"0.7282366",
"0.72492176",
"0.7243204",
"0.7213866",
"0.70695406",
"0.69015753",
"0.6785178",
"0.677422",
"0.67715675",
"0.67566174",
"0.6718396",
"0.6709835",
... | 0.7187174 | 13 |
# Builds the complete feature row for one pull request, returned as a Hash
# that becomes one line of the output dataset. +pr+ is a row produced by
# #pull_reqs; +lang+ is the project's language label. Raises when the
# project has zero source lines (src_lines == 0), since several per-kloc
# metrics would otherwise divide by zero.
def process_pull_request(pr, lang)
  # Statistics across pull request commits
  stats = pr_stats(pr)
  merged = !pr[:merged_at].nil?
  # @close_reason maps github_id -> [merged-via-git?, heuristic, merger]
  # as computed by #merged_with_git?.
  git_merged, merge_reason, merge_person = @close_reason[pr[:github_id]]
  # Count number of src/comment lines
  src = src_lines(pr[:id].to_f)
  if src == 0 then raise Exception.new("Bad src lines: 0, pr: #{pr[:github_id]}, id: #{pr[:id]}") end
  # All windowed metrics look this many months back from PR creation.
  months_back = 3
  commits_incl_prs = commits_last_x_months(pr, false, months_back)
  prev_pull_reqs = prev_pull_requests(pr,'opened')
  # Create line for a pull request
  {
    :pull_req_id => pr[:id],
    :project_name => "#{pr[:login]}/#{pr[:project_name]}",
    :lang => lang,
    :github_id => pr[:github_id],
    # Timestamps as Unix epoch seconds.
    :created_at => Time.at(pr[:created_at]).to_i,
    :merged_at => merge_time(pr, merged, git_merged),
    :closed_at => Time.at(pr[:closed_at]).to_i,
    :lifetime_minutes => pr[:lifetime_minutes],
    :mergetime_minutes => merge_time_minutes(pr, merged, git_merged),
    :merged_using => merge_reason.to_s,
    :conflict => conflict?(pr),
    :forward_links => forward_links?(pr),
    :team_size => team_size_at_open(pr, months_back),
    # Commit / comment / participant counts.
    :num_commits => num_commits(pr),
    :num_commits_open => num_commits_at_open(pr),
    :num_pr_comments => num_pr_comments(pr),
    :num_issue_comments => num_issue_comments(pr),
    :num_commit_comments => num_commit_comments(pr),
    :num_comments => num_pr_comments(pr) + num_issue_comments(pr) + num_commit_comments(pr),
    :num_participants => num_participants(pr),
    # File-level churn statistics from the PR's commits (see pr_stats).
    :files_added => stats[:files_added],
    :files_deleted => stats[:files_removed],
    :files_modified => stats[:files_modified],
    :files_changed => stats[:files_added] + stats[:files_modified] + stats[:files_removed],
    :src_files => stats[:src_files],
    :doc_files => stats[:doc_files],
    :other_files => stats[:other_files],
    :perc_external_contribs => commits_last_x_months(pr, true, months_back) / commits_incl_prs,
    :sloc => src,
    :src_churn => stats[:lines_added] + stats[:lines_deleted],
    :test_churn => stats[:test_lines_added] + stats[:test_lines_deleted],
    :commits_on_files_touched => commits_on_files_touched(pr, months_back),
    :commits_to_hottest_file => commits_to_hottest_file(pr, months_back),
    # Test-coverage proxies, normalised per 1000 source lines.
    :test_lines_per_kloc => (test_lines(pr[:id]).to_f / src.to_f) * 1000,
    :test_cases_per_kloc => (num_test_cases(pr[:id]).to_f / src.to_f) * 1000,
    :asserts_per_kloc => (num_assertions(pr[:id]).to_f / src.to_f) * 1000,
    :watchers => watchers(pr),
    :requester => requester(pr),
    :closer => closer(pr),
    :merger => merge_person,
    :prev_pullreqs => prev_pull_reqs,
    :requester_succ_rate => if prev_pull_reqs > 0 then prev_pull_requests(pr, 'merged').to_f / prev_pull_reqs.to_f else 0 end,
    :followers => followers(pr),
    :intra_branch => if intra_branch?(pr) == 1 then true else false end,
    :main_team_member => main_team_member?(pr, months_back),
    :social_connection_tsay => social_connection_tsay?(pr),
    # Literature-derived features (Vasilescu et al., Tsay et al.).
    :hotness_vasilescu => hotness_vasilescu(pr, months_back),
    :team_size_vasilescu => team_size_vasilescu(pr, months_back),
    :description_complexity => description_complexity(pr),
    :workload => workload(pr),
    # Prior interaction of the requester with this project.
    :prior_interaction_issue_events => prior_interaction_issue_events(pr, months_back),
    :prior_interaction_issue_comments => prior_interaction_issue_comments(pr, months_back),
    :prior_interaction_pr_events => prior_interaction_pr_events(pr, months_back),
    :prior_interaction_pr_comments => prior_interaction_pr_comments(pr, months_back),
    :prior_interaction_commits => prior_interaction_commits(pr, months_back),
    :prior_interaction_commit_comments => prior_interaction_commit_comments(pr, months_back),
    # Responsiveness / continuous-integration features.
    :first_response => first_response(pr),
    :ci_latency => ci_latency(pr),
    :ci_errors => ci_errors?(pr),
    :ci_test_failures => ci_test_failures?(pr),
  }
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def process_pull_request(pr, lang)\n\n # Statistics across pull request commits\n stats = pr_stats(pr[:id])\n\n merged = ! pr[:merged_at].nil?\n git_merged = false\n merge_reason = :github\n\n if not merged\n git_merged, merge_reason = merged_with_git?(pr)\n end\n\n # Count number of s... | [
"0.73715436",
"0.7356396",
"0.710522",
"0.7092671",
"0.7053264",
"0.7037376",
"0.69729483",
"0.68844754",
"0.68553483",
"0.6731509",
"0.67089295",
"0.6690092",
"0.66153795",
"0.66135085",
"0.66122043",
"0.65744686",
"0.6532083",
"0.6472704",
"0.6440637",
"0.6391988",
"0.63858... | 0.7366131 | 1 |
# Checks whether a merge of the pull request occurred outside GitHub, using
# a cascade of heuristics. Returns a [merged?, reason] pair, where reason is
# one of :commits_in_master, :fixes_in_commit, :commit_sha_in_comments,
# :merged_in_comments or :unknown.
# Only clean merges can be discovered; rebases and force-pushes rewrite the
# commit history and cannot be detected without source-code analysis.
def merged_with_git?(pr)
  # 1. Commits from the pull request appear in the master branch.
  q = <<-QUERY
  select c.sha
  from pull_request_commits prc, commits c
  where prc.commit_id = c.id
  and prc.pull_request_id = ?
  QUERY
  db.fetch(q, pr[:id]).each do |x|
    unless @all_commits.select { |y| x[:sha].start_with? y }.empty?
      return [true, :commits_in_master]
    end
  end

  # 2. The PR was closed by a commit (the "Fixes: #n" convention) and that
  # commit is present in the project's master branch.
  unless @closed_by_commit[pr[:github_id]].nil?
    sha = @closed_by_commit[pr[:github_id]]
    unless @all_commits.select { |x| sha.start_with? x }.empty?
      return [true, :fixes_in_commit]
    end
  end

  # 3 & 4. Inspect the last (up to) three issue comments on the PR.
  comments = issue_comments(pr[:login], pr[:project_name], pr[:github_id])
  comments.reverse.take(3).map { |x| x['body'] }.uniq.each do |last|
    # 3. The comment mentions a commit SHA: treat as merged if the comment
    # also contains a merge keyword, or if that SHA is in master.
    last.scan(/([0-9a-f]{6,40})/m).each do |x|
      if merge_keyword?(last)
        return [true, :commit_sha_in_comments]
      else
        unless @all_commits.select { |y| x[0].start_with? y }.empty?
          return [true, :commit_sha_in_comments]
        end
      end
    end

    # 4. A merge keyword alone in one of the closing comments.
    return [true, :merged_in_comments] if merge_keyword?(last)
  end

  [false, :unknown]
end

# True when +text+ contains a merge-like verb: merging/merged,
# applying/applied, pulling/pulled, pushing/pushed, integrating/integrated.
# Bug fix vs. the original inline patterns: they used character classes
# ([?:ing|ed]) instead of non-capturing groups ((?:ing|ed)) for
# pull/push/integrate, and appl(?:ying|ed) could never match "applied".
def merge_keyword?(text)
  !!(text.match(/merg(?:ing|ed)/i) ||
     text.match(/appl(?:ying|ied)/i) ||
     text.match(/pull(?:ing|ed)/i) ||
     text.match(/push(?:ing|ed)/i) ||
     text.match(/integrat(?:ing|ed)/i))
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def merged_with_git?(pr)\n\n #1. Commits from the pull request appear in the master branch\n q = <<-QUERY\n\t select c.sha\n from pull_request_commits prc, commits c\n\t where prc.commit_id = c.id\n\t\t and prc.pull_request_id = ?\n QUERY\n db.fetch(q, pr[:id]).each do |x|\n unless @all_comm... | [
"0.7157084",
"0.7024886",
"0.6796973",
"0.6494114",
"0.6487157",
"0.6386791",
"0.6298509",
"0.6074725",
"0.598951",
"0.59493953",
"0.5933063",
"0.5827187",
"0.57913154",
"0.57852113",
"0.57788813",
"0.57530034",
"0.5749998",
"0.5712126",
"0.5698849",
"0.5655649",
"0.56404954"... | 0.7166265 | 0 |
# Number of distinct developers who authored direct (non-pull-request)
# commits to the project in the interval_months months before this pull
# request was opened.
# NOTE: interval_months is interpolated into the SQL directly — callers
# pass integer literals; do not pass untrusted input.
def team_size_at_open(pr, interval_months)
  q = <<-QUERY
  select count(distinct author_id) as teamsize
  from projects p, commits c, project_commits pc, pull_requests pr,
       pull_request_history prh
  where p.id = pc.project_id
  and pc.commit_id = c.id
  and p.id = pr.base_repo_id
  and prh.pull_request_id = pr.id
  and not exists (select * from pull_request_commits prc1 where prc1.commit_id = c.id)
  and prh.action = 'opened'
  and c.created_at < prh.created_at
  and c.created_at > DATE_SUB(prh.created_at, INTERVAL #{interval_months} MONTH)
  and pr.id=?;
  QUERY
  db.fetch(q, pr[:id]).first[:teamsize]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def team_size_at_open(pr_id, interval_months)\n q = <<-QUERY\n select count(distinct author_id) as teamsize\n from projects p, commits c, project_commits pc, pull_requests pr,\n pull_request_history prh\n where p.id = pc.project_id\n and pc.commit_id = c.id\n and p.id = pr.base_repo_i... | [
"0.67035663",
"0.6379662",
"0.6324253",
"0.60829896",
"0.60086155",
"0.5920281",
"0.5900949",
"0.5851636",
"0.5841016",
"0.57911545",
"0.57614726",
"0.57493454",
"0.57299924",
"0.569612",
"0.5682094",
"0.56521595",
"0.5588286",
"0.5580944",
"0.5568202",
"0.55591005",
"0.55271... | 0.6661594 | 1 |
# Number of commits attached to the pull request.
def num_commits(pr)
  q = <<-QUERY
  select count(*) as commit_count
  from pull_requests pr, pull_request_commits prc
  where pr.id = prc.pull_request_id
  and pr.id=?
  group by prc.pull_request_id
  QUERY
  db.fetch(q, pr[:id]).first[:commit_count]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_commits(pr_id)\n q = <<-QUERY\n select count(*) as commit_count\n from pull_requests pr, pull_request_commits prc\n where pr.id = prc.pull_request_id\n and pr.id=?\n group by prc.pull_request_id\n QUERY\n if_empty(db.fetch(q, pr_id).all, :commit_count)\n end",
"def num_commits(... | [
"0.80021495",
"0.7814021",
"0.7810049",
"0.7747609",
"0.7726167",
"0.77139986",
"0.7681667",
"0.7627089",
"0.7568288",
"0.756284",
"0.7502324",
"0.7078911",
"0.7054514",
"0.7024845",
"0.70200264",
"0.6994725",
"0.69469666",
"0.69331604",
"0.6876185",
"0.681915",
"0.6806591",
... | 0.82978874 | 0 |
# Number of code-review comments on the pull request, counting only those
# posted before the PR's (last) close.
def num_pr_comments(pr)
  q = <<-QUERY
  select count(*) as comment_count
  from pull_request_comments prc
  where prc.pull_request_id = ?
  and prc.created_at < (
    select max(created_at)
    from pull_request_history
    where action = 'closed' and pull_request_id = ?)
  QUERY
  db.fetch(q, pr[:id], pr[:id]).first[:comment_count]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_issue_comments(pr)\n q = <<-QUERY\n select count(*) as issue_comment_count\n from pull_requests pr, issue_comments ic, issues i\n where ic.issue_id=i.id\n and i.issue_id=pr.pullreq_id\n and pr.base_repo_id = i.repo_id\n and pr.id = ?\n and ic.created_at < (\n select max(created... | [
"0.7556371",
"0.7336787",
"0.7331772",
"0.73292726",
"0.73016566",
"0.7274828",
"0.72505116",
"0.7059893",
"0.69705606",
"0.6854574",
"0.6749827",
"0.6728996",
"0.67131925",
"0.66869813",
"0.6661995",
"0.6626507",
"0.6560825",
"0.6541231",
"0.64526445",
"0.6407268",
"0.634022... | 0.7432952 | 1 |
# Number of discussion comments on the issue linked to the pull request,
# counting only those posted before the PR's (last) close.
def num_issue_comments(pr)
  q = <<-QUERY
  select count(*) as issue_comment_count
  from pull_requests pr, issue_comments ic, issues i
  where ic.issue_id=i.id
  and i.issue_id=pr.pullreq_id
  and pr.base_repo_id = i.repo_id
  and pr.id = ?
  and ic.created_at < (
    select max(created_at)
    from pull_request_history
    where action = 'closed' and pull_request_id = ?)
  QUERY
  db.fetch(q, pr[:id], pr[:id]).first[:issue_comment_count]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_pr_comments(pr)\n q = <<-QUERY\n select count(*) as comment_count\n from pull_request_comments prc\n where prc.pull_request_id = ?\n and prc.created_at < (\n select max(created_at)\n from pull_request_history\n where action = 'closed' and pull_request_id = ?)\n QUERY\n d... | [
"0.7576217",
"0.75393003",
"0.74854296",
"0.7479533",
"0.74347186",
"0.7316669",
"0.7253537",
"0.72396207",
"0.72226524",
"0.72040796",
"0.715874",
"0.71549124",
"0.7084611",
"0.7038057",
"0.7018007",
"0.7018007",
"0.6996259",
"0.6949047",
"0.691911",
"0.68226385",
"0.6820623... | 0.77144504 | 0 |
# Number of commit comments on the commits composing the pull request.
def num_commit_comments(pr)
  q = <<-QUERY
  select count(*) as commit_comment_count
  from pull_request_commits prc, commit_comments cc
  where prc.commit_id = cc.commit_id
  and prc.pull_request_id = ?
  QUERY
  db.fetch(q, pr[:id]).first[:commit_comment_count]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_commits(lines)\n i = 0\n\tlines.each{|line| \n\t\tif line.include? \"commit\"\n\t\t\ti += 1\n\t\tend\n\t}\n\treturn i\nend",
"def num_commits(pr)\n q = <<-QUERY\n select count(*) as commit_count\n from pull_requests pr, pull_request_commits prc\n where pr.id = prc.pull_request_id\n ... | [
"0.75296736",
"0.745867",
"0.7440561",
"0.74365324",
"0.73889405",
"0.7360255",
"0.73154676",
"0.73098975",
"0.72924584",
"0.7228431",
"0.7217545",
"0.72108775",
"0.71541756",
"0.6962108",
"0.6938286",
"0.6931151",
"0.6880198",
"0.68053406",
"0.67937547",
"0.66441905",
"0.664... | 0.8231813 | 0 |
# Number of followers the PR opener had at the time the pull request was
# opened (only follow relations created before the 'opened' event count).
def followers(pr)
  q = <<-QUERY
  select count(f.follower_id) as num_followers
  from pull_requests pr, followers f, pull_request_history prh
  where prh.actor_id = f.user_id
  and prh.pull_request_id = pr.id
  and prh.action = 'opened'
  and f.created_at < prh.created_at
  and pr.id = ?
  QUERY
  db.fetch(q, pr[:id]).first[:num_followers]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def followers_count\n follow_count_for_a(:follower)\n end",
"def followers_count\n @target.followers_count.to_i\n end",
"def number_of_followees\n return_list_of_followed_users.size\n end",
"def followees_count\n follow_count_for_a(:followee)\n end",
"def num_followers\n ... | [
"0.81894225",
"0.80769515",
"0.79417145",
"0.79048413",
"0.771846",
"0.7598255",
"0.7485302",
"0.7451493",
"0.7389412",
"0.726542",
"0.7252984",
"0.7220529",
"0.72170365",
"0.71433103",
"0.70582056",
"0.7028172",
"0.7020162",
"0.69493824",
"0.690434",
"0.6893643",
"0.6864412"... | 0.78072184 | 4 |
# Number of watchers/stargazers the base repository had at the time the
# pull request was opened.
def watchers(pr)
  q = <<-QUERY
  select count(w.user_id) as num_watchers
  from watchers w, pull_requests pr, pull_request_history prh
  where prh.pull_request_id = pr.id
  and w.created_at < prh.created_at
  and w.repo_id = pr.base_repo_id
  and prh.action='opened'
  and pr.id = ?
  QUERY
  db.fetch(q, pr[:id]).first[:num_watchers]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def stargazers_count\n self.watchers_count\n end",
"def pull_requests\n pull_request_link = document.at_css('a[highlight=\"repo_pulls\"] .counter')\n return nil unless pull_request_link\n pull_request_link.text[/\\d+/].to_i\n end",
"def watchers(pr_id)\n q = <<-QUERY\n select count(w.user_i... | [
"0.69450414",
"0.66943973",
"0.6594038",
"0.6472232",
"0.6208711",
"0.6205655",
"0.6109333",
"0.6079912",
"0.60702026",
"0.60435456",
"0.59364015",
"0.5879637",
"0.5861214",
"0.5839766",
"0.5832184",
"0.5814369",
"0.5808842",
"0.57986206",
"0.5728349",
"0.5698596",
"0.5697546... | 0.70845973 | 0 |
# Login of the user who first closed the pull request. Looks in the PR
# history first; falls back to the linked issue's 'closed'/'merged' event,
# and returns '' when no closer can be determined.
def closer(pr)
  by_history = <<-QUERY
  select u.login as login
  from pull_request_history prh, users u
  where prh.pull_request_id = ?
  and prh.actor_id = u.id
  and prh.action = 'closed'
  QUERY
  by_issue_event = <<-QUERY
  select u.login as login
  from issues i, issue_events ie, users u
  where i.pull_request_id = ?
  and ie.issue_id = i.id
  and (ie.action = 'closed' or ie.action = 'merged')
  and u.id = ie.actor_id
  QUERY
  # || is lazy: the issue-event query only runs when the history query
  # returns no row, exactly like the original two-step lookup.
  row = db.fetch(by_history, pr[:id]).first || db.fetch(by_issue_event, pr[:id]).first
  row.nil? ? '' : row[:login]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def open_pull_request\n outgoing_pull_requests.find(&:open?)\n end",
"def process_closed_pull_request(pull_request)\n pr_name = pull_request['base']['repo']['full_name'].to_s\n pr_number = pull_request['number'].to_s\n pr_key = pr_name + \":\" + pr_number\n current_commit_hash = pull_request['hea... | [
"0.63733584",
"0.6151286",
"0.6139798",
"0.61099535",
"0.6058159",
"0.603897",
"0.602349",
"0.5929755",
"0.58977675",
"0.5794717",
"0.5779351",
"0.5715367",
"0.56523514",
"0.5609084",
"0.5509241",
"0.5477319",
"0.54753244",
"0.54663795",
"0.54663795",
"0.54578227",
"0.5410378... | 0.53723025 | 27 |
# Login of the user who merged the pull request, taken from the linked
# issue's 'merged' event. When no such event exists but the PR is known to
# have been merged (per the @close_reason heuristics), the closer is
# assumed to be the merger. Returns '' otherwise.
def merger(pr)
  q = <<-QUERY
  select u.login as login
  from issues i, issue_events ie, users u
  where i.pull_request_id = ?
  and ie.issue_id = i.id
  and ie.action = 'merged'
  and u.id = ie.actor_id
  QUERY
  row = db.fetch(q, pr[:id]).first
  return row[:login] unless row.nil?

  # If the PR was merged, then it is safe to assume that the closer is
  # also the merger.
  reason = @close_reason[pr[:github_id]]
  if reason && reason[1] != :unknown
    closer(pr)
  else
    ''
  end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def merge_pull_request\n git.merge pull_request.branch\n end",
"def create_merge_request\n author_id, author_found = user_finder.author_id_for(pull_request)\n\n description = MarkdownText\n .format(pull_request.description, pull_request.author, author_found)\n\n attr... | [
"0.6619105",
"0.64542943",
"0.63079023",
"0.6156396",
"0.6037527",
"0.59483516",
"0.5921236",
"0.5917806",
"0.58683294",
"0.5858013",
"0.58519405",
"0.5818597",
"0.5735177",
"0.57218945",
"0.5690467",
"0.5678707",
"0.56734693",
"0.56366605",
"0.56248313",
"0.5603748",
"0.5598... | 0.6875498 | 0 |
# Login of the user who opened the pull request.
def requester(pr)
  q = <<-QUERY
  select u.login as login
  from users u, pull_request_history prh
  where prh.actor_id = u.id
  and action = 'opened'
  and prh.pull_request_id = ?
  QUERY
  db.fetch(q, pr[:id]).first[:login]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def followers_count\n follow_count_for_a(:follower)\n end",
"def followers_count\n @target.followers_count.to_i\n end",
"def number_of_followees\n return_list_of_followed_users.size\n end",
"def followees_count\n follow_count_for_a(:followee)\n end",
"def followers(pr)\n ... | [
"0.81894225",
"0.80769515",
"0.79417145",
"0.79048413",
"0.78072184",
"0.771846",
"0.7598255",
"0.7485302",
"0.7451493",
"0.7389412",
"0.726542",
"0.7252984",
"0.7220529",
"0.72170365",
"0.71433103",
"0.70582056",
"0.7028172",
"0.7020162",
"0.69493824",
"0.690434",
"0.6893643... | 0.0 | -1 |
# Number of pull requests the opener of +pr+ had already acted on (with
# +action+, e.g. 'opened') on the same repository before this PR was
# opened. For action == 'merged', GitHub has no reliable merged event, so
# previously *opened* PRs are counted and filtered through the
# @close_reason heuristics (anything with a known merge reason counts).
def prev_pull_requests(pr, action)
  if action == 'merged'
    q = <<-QUERY
    select pr.pullreq_id, prh.pull_request_id as num_pull_reqs
    from pull_request_history prh, pull_requests pr
    where prh.action = 'opened'
    and prh.created_at < (select min(created_at) from pull_request_history prh1 where prh1.pull_request_id = ? and prh1.action = 'opened')
    and prh.actor_id = (select min(actor_id) from pull_request_history prh1 where prh1.pull_request_id = ? and prh1.action = 'opened')
    and prh.pull_request_id = pr.id
    and pr.base_repo_id = (select pr1.base_repo_id from pull_requests pr1 where pr1.id = ?);
    QUERY
    pull_reqs = db.fetch(q, pr[:id], pr[:id], pr[:id]).all
    # Count PRs whose close reason is known (i.e. detected as merged).
    pull_reqs.reduce(0) do |acc, pull_req|
      if not @close_reason[pull_req[:pullreq_id]].nil? and @close_reason[pull_req[:pullreq_id]][1] != :unknown
        acc += 1
      end
      acc
    end
  else
    q = <<-QUERY
    select pr.pullreq_id, prh.pull_request_id as num_pull_reqs
    from pull_request_history prh, pull_requests pr
    where prh.action = ?
    and prh.created_at < (select min(created_at) from pull_request_history prh1 where prh1.pull_request_id = ?)
    and prh.actor_id = (select min(actor_id) from pull_request_history prh1 where prh1.pull_request_id = ? and action = ?)
    and prh.pull_request_id = pr.id
    and pr.base_repo_id = (select pr1.base_repo_id from pull_requests pr1 where pr1.id = ?);
    QUERY
    db.fetch(q, action, pr[:id], pr[:id], action, pr[:id]).all.size
  end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_pulls_count\n pulls_count\n end",
"def get_pull_request_count(repo_id)\n pull_request_count = client.pull_requests(repo_id, :state => 'open').length\n rescue Octokit::NotFound\n nil\n end",
"def pull_requests\n pull_request_link = document.at_css('a[highlight=\"repo_pulls\"] .counter')\n... | [
"0.713514",
"0.7131682",
"0.70823425",
"0.68670225",
"0.6846943",
"0.68152624",
"0.6800499",
"0.65618026",
"0.6551197",
"0.6431202",
"0.633431",
"0.62567836",
"0.62564844",
"0.6225859",
"0.6204493",
"0.6197439",
"0.6189901",
"0.61844516",
"0.6179055",
"0.6114855",
"0.6101259"... | 0.72771865 | 0 |
# Number of distinct issues in this project on which the PR opener
# triggered issue events during the months_back months before the pull
# request was opened — a measure of the requester's prior involvement.
# NOTE: months_back is interpolated into the SQL directly — callers pass
# integer literals; do not pass untrusted input.
def prior_interaction_issue_events(pr, months_back)
  q = <<-QUERY
  select count(distinct(i.id)) as num_issue_events
  from issue_events ie, pull_request_history prh, pull_requests pr, issues i
  where ie.actor_id = prh.actor_id
  and i.repo_id = pr.base_repo_id
  and i.id = ie.issue_id
  and prh.pull_request_id = pr.id
  and prh.action = 'opened'
  and ie.created_at > DATE_SUB(prh.created_at, INTERVAL #{months_back} MONTH)
  and ie.created_at < prh.created_at
  and prh.pull_request_id = ?
  QUERY
  db.fetch(q, pr[:id]).first[:num_issue_events]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_pr_comments(pr)\n q = <<-QUERY\n select count(*) as comment_count\n from pull_request_comments prc\n where prc.pull_request_id = ?\n and prc.created_at < (\n select max(created_at)\n from pull_request_history\n where action = 'closed' and pull_request_id = ?)\n QUERY\n d... | [
"0.6354733",
"0.6182022",
"0.6172909",
"0.6156271",
"0.6140836",
"0.6135201",
"0.6128242",
"0.6064098",
"0.59022",
"0.5887495",
"0.5867945",
"0.5838631",
"0.5827062",
"0.5799968",
"0.57770145",
"0.57725924",
"0.5760466",
"0.5752995",
"0.574219",
"0.5713595",
"0.57036203",
"... | 0.6722686 | 0 |
# Median commit count across the files this pull request touches,
# normalised by the number of project commits in the same months_back
# window (the `true` flag presumably excludes PR commits — matches the
# commits_incl_prs naming in process_pull_request; confirm against
# commits_last_x_months).
def hotness_vasilescu(pr, months_back)
  per_file = commits_on_pr_files(pr, months_back).map { |entry| entry[1].size }.sort
  median = per_file[per_file.size / 2]
  median / commits_last_x_months(pr, true, months_back).to_f
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def n_commits(date)\n 30\nend",
"def commits_on_files_touched(pr, months_back)\n commits_on_pr_files(pr, months_back).reduce([]) do |acc, commit_list|\n acc + commit_list[1]\n end.flatten.uniq.size\n end",
"def commits_last_x_months(pr_id, exclude_pull_req, months)\n q = <<-QUERY\n select co... | [
"0.6779668",
"0.6603137",
"0.64762396",
"0.6458871",
"0.6443814",
"0.6323527",
"0.6317565",
"0.6268151",
"0.6256477",
"0.60984623",
"0.6031138",
"0.6016606",
"0.5934727",
"0.5889296",
"0.5879036",
"0.5844175",
"0.58407736",
"0.5840511",
"0.58368546",
"0.5813489",
"0.5808965",... | 0.644596 | 4 |
# Distinct non-fake users who committed directly (not through pull
# requests) to the base repository in the months_back months before this
# PR was opened. Returns database rows with a :login field.
def committer_team(pr, months_back)
  q = <<-QUERY
  select distinct(u.login)
  from commits c, project_commits pc, pull_requests pr, users u, pull_request_history prh
  where pr.base_repo_id = pc.project_id
  and not exists (select * from pull_request_commits where commit_id = c.id)
  and pc.commit_id = c.id
  and pr.id = ?
  and u.id = c.committer_id
  and u.fake is false
  and prh.pull_request_id = pr.id
  and prh.action = 'opened'
  and c.created_at > DATE_SUB(prh.created_at, INTERVAL #{months_back} MONTH)
  and c.created_at < prh.created_at;
  QUERY
  db.fetch(q, pr[:id]).all
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def merger_team(pr, months_back)\n @close_reason.map do |k,v|\n created_at = @prs.find{|x| x[:github_id] == k}\n [created_at[:created_at], v[2]]\n end.find_all do |x|\n x[0].to_i > (pr[:created_at].to_i - months_back * 30 * 24 * 3600)\n end.map do |x|\n x[1]\n end.select{|x| x != '... | [
"0.7264976",
"0.6709216",
"0.6700396",
"0.6455931",
"0.6409454",
"0.6305739",
"0.6300893",
"0.6259788",
"0.61844206",
"0.61705965",
"0.6054854",
"0.5956339",
"0.5894332",
"0.5868499",
"0.57975554",
"0.5787727",
"0.57327735",
"0.570064",
"0.5680473",
"0.563527",
"0.55958325",
... | 0.66456604 | 3 |
# Unique logins of the people who merged pull requests of this project in
# the months_back months before +pr+ was created, based on the
# @close_reason heuristics ([merged?, reason, merger-login] per github_id)
# and the PR list cached in @prs. Entries with no known merger ('') are
# dropped.
def merger_team(pr, months_back)
  cutoff = pr[:created_at].to_i - months_back * 30 * 24 * 3600
  opened_and_merger = @close_reason.map do |github_id, reason|
    opened_pr = @prs.find { |candidate| candidate[:github_id] == github_id }
    [opened_pr[:created_at], reason[2]]
  end
  opened_and_merger
    .select { |opened_at, _| opened_at.to_i > cutoff }
    .map { |_, login| login }
    .reject { |login| login == '' }
    .uniq
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def merger_team(pr, months_back = nil)\n recently_merged = prs.find_all do |b|\n close_reason[b[:github_id]] != :unknown and\n (months_back.nil? ? true : b[:created_at].to_i > (pr[:created_at].to_i - months_back * 30 * 24 * 3600))\n end.map do |b|\n b[:github_id]\n end\n\n q = <<-QUE... | [
"0.72336936",
"0.62989974",
"0.6249305",
"0.60576725",
"0.60278976",
"0.6006251",
"0.5918665",
"0.5904491",
"0.59026676",
"0.58392525",
"0.5821473",
"0.5814385",
"0.57702154",
"0.5687415",
"0.5676743",
"0.5497085",
"0.54927176",
"0.54466033",
"0.54411095",
"0.5437042",
"0.541... | 0.7605352 | 0 |
# Number of distinct integrators (direct committers plus PR mergers)
# active during the months_back months before the PR was opened.
# NOTE(review): committer_team returns DB rows while merger_team returns
# login strings, so deduplication never collapses entries across the two
# lists — confirm this is intended.
def team_size_vasilescu(pr, months_back)
  # Array#| is union-with-dedup, equivalent to (a + b).uniq.
  (committer_team(pr, months_back) | merger_team(pr, months_back)).size
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nb_open_periods\n periods.opened.count\n end",
"def active_users\n PullRequest.active_users(current_year)\n end",
"def how_many_active_announcements\n return announcements.where('expires_at >= ?', Date.today).count\n end",
"def approvals_count\n return 0 if self.id.nil?\n return Request... | [
"0.5985353",
"0.59820515",
"0.5965511",
"0.5924122",
"0.5807508",
"0.5736698",
"0.57154125",
"0.57074374",
"0.56603837",
"0.5642401",
"0.5642401",
"0.55874175",
"0.5520761",
"0.5516832",
"0.54695946",
"0.5438682",
"0.5435393",
"0.5431008",
"0.54174256",
"0.54072326",
"0.54072... | 0.0 | -1 |
# Minutes from pull request creation to the first human response: the
# earliest code-review comment or issue comment posted before the PR's
# last close, ignoring CI bot accounts ('travis-ci', 'cloudbees').
# Returns -1 when there was no response at all.
def first_response(pr)
  q = <<-QUERY
  select min(created) as first_resp from (
    select min(prc.created_at) as created
    from pull_request_comments prc, users u
    where prc.pull_request_id = ?
    and u.id = prc.user_id
    and u.login not in ('travis-ci', 'cloudbees')
    and prc.created_at < (
      select max(created_at)
      from pull_request_history
      where action = 'closed' and pull_request_id = ?)
    union
    select min(ic.created_at) as created
    from issues i, issue_comments ic, users u
    where i.pull_request_id = ?
    and i.id = ic.issue_id
    and u.id = ic.user_id
    and u.login not in ('travis-ci', 'cloudbees')
    and ic.created_at < (
      select max(created_at)
      from pull_request_history
      where action = 'closed' and pull_request_id = ?)
  ) as a;
  QUERY
  resp = db.fetch(q, pr[:id], pr[:id], pr[:id], pr[:id]).first[:first_resp]
  unless resp.nil?
    # Timestamp difference is in seconds; truncate to whole minutes.
    (resp - pr[:created_at]).to_i / 60
  else
    -1
  end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def next_scheduled_at\n reviewed_at + (24.hours * next_interval)\n end",
"def total_preparation_submission_minutes\n (DateTime.now.to_i - submission.client.created_at.to_datetime.to_i) / 60\n end",
"def review_interval\n @sbProject.reviewInterval\n end",
"def trigger_time_for_user_in_grou... | [
"0.61318254",
"0.60146344",
"0.59754807",
"0.5736516",
"0.57034457",
"0.56793433",
"0.55755544",
"0.5573477",
"0.5518182",
"0.5517509",
"0.5433176",
"0.5419649",
"0.54088527",
"0.5406192",
"0.5398817",
"0.53417176",
"0.5329991",
"0.53286713",
"0.5318462",
"0.52903634",
"0.527... | 0.5896216 | 3 |
Time between PR arrival and last CI run | def ci_latency(pr)
last_run = travis.find_all{|b| b[:pull_req] == pr[:github_id]}.sort_by { |x| Time.parse(x[:finished_at]).to_i }[-1]
unless last_run.nil?
Time.parse(last_run[:finished_at]) - pr[:created_at]
else
-1
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run_time\n ((Time.now - start_time) * 1000).round\n end",
"def pred_time\n @time = (@time - @delta).max(0)\n end",
"def ready_in_time\n return self.preparation_time + self.cooking_time\n end",
"def take_time\n if self.completed?\n (complete_on - started_on) if complete_on && s... | [
"0.65395087",
"0.65022016",
"0.64771545",
"0.6391326",
"0.63715094",
"0.63444453",
"0.6339197",
"0.63330895",
"0.63243395",
"0.63132787",
"0.6300052",
"0.62845975",
"0.6266457",
"0.61860746",
"0.61581624",
"0.6157617",
"0.6148821",
"0.61453664",
"0.6103937",
"0.60984904",
"0.... | 0.7237871 | 0 |
Did the build result in errors? | def ci_errors?(pr)
not travis.find_all{|b| b[:pull_req] == pr[:github_id] and b[:status] == 'errored'}.empty?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def complete_failed?(build)\n return if ENV[\"CODY_TEST\"]\n build.build_complete && build.build_status != \"SUCCEEDED\"\n end",
"def check_error\n if @errors.empty?\n false\n else\n @log.error(@taskname) { \"--- Configuration for #{@taskname} ---\" }\n @doc.each do |doc|\n ... | [
"0.69374293",
"0.6803087",
"0.6543348",
"0.65288985",
"0.6357394",
"0.63572127",
"0.6271711",
"0.62716395",
"0.62407386",
"0.6231105",
"0.62107897",
"0.6197163",
"0.6195236",
"0.6193814",
"0.61927825",
"0.61855596",
"0.6184797",
"0.6182824",
"0.6178348",
"0.61309874",
"0.6118... | 0.6300199 | 6 |
Did the build result in test failuers? | def ci_test_failures?(pr)
not travis.find_all{|b| b[:pull_req] == pr[:github_id] and b[:status] == 'failed'}.empty?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def complete_failed?(build)\n return if ENV[\"CODY_TEST\"]\n build.build_complete && build.build_status != \"SUCCEEDED\"\n end",
"def flexmock_test_has_failed? # :nodoc:\n passed? == false\n end",
"def failed?\n true\n end",
"def failed?\n @passed == false\n end",
"def ... | [
"0.7561939",
"0.72606474",
"0.7073098",
"0.7072798",
"0.70500773",
"0.7023119",
"0.7020617",
"0.69700027",
"0.69700027",
"0.69700027",
"0.6890316",
"0.688445",
"0.6865251",
"0.6842063",
"0.6815164",
"0.68069416",
"0.6802031",
"0.67936045",
"0.6778365",
"0.6775665",
"0.6775315... | 0.6603993 | 31 |
Total number of words in the pull request title and description | def description_complexity(pr)
pull_req = pull_req_entry(pr[:id])
(pull_req['title'] + ' ' + pull_req['body']).gsub(/[\n\r]\s+/, ' ').split(/\s+/).size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def total_words\n self.title.to_s.count_words + self.content.to_s.count_words\n end",
"def get_total_readme_word_count(data)\n total_word_count = 0\n if data['readme_raw_text'].nil?\n total_word_count = 0\n else\n readme_text = Base64.decode64(data['readme_raw_text']['content'])\n readme_text = r... | [
"0.727325",
"0.7205276",
"0.70367414",
"0.6869618",
"0.6696522",
"0.66663575",
"0.66235113",
"0.65030825",
"0.64799494",
"0.64788496",
"0.63634485",
"0.63634485",
"0.63377833",
"0.62976307",
"0.628013",
"0.6260527",
"0.6205044",
"0.6167149",
"0.6135621",
"0.6117096",
"0.60976... | 0.7479853 | 0 |
Total number of pull requests still open in each project at pull request creation time. | def workload(pr)
q = <<-QUERY
select count(distinct(prh.pull_request_id)) as num_open
from pull_request_history prh, pull_requests pr, pull_request_history prh3
where prh.created_at < prh3.created_at
and prh.action = 'opened'
and pr.id = prh.pull_request_id
and prh3.pull_request_id = ?
and (exists (select * from pull_request_history prh1
where prh1.action = 'closed'
and prh1.pull_request_id = prh.pull_request_id
and prh1.created_at > prh3.created_at)
or not exists (select * from pull_request_history prh1
where prh1.action = 'closed'
and prh1.pull_request_id = prh.pull_request_id)
)
and pr.base_repo_id = (select pr3.base_repo_id from pull_requests pr3 where pr3.id = ?)
QUERY
db.fetch(q, pr[:id], pr[:id]).first[:num_open]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_pull_request_count(repo_id)\n pull_request_count = client.pull_requests(repo_id, :state => 'open').length\n rescue Octokit::NotFound\n nil\n end",
"def get_pull_request_count(project_id_or_key, repository_id_or_name)\n get(\"projects/#{project_id_or_key}/git/repositories/#{repository_id_or... | [
"0.7719369",
"0.74763167",
"0.73494357",
"0.7124827",
"0.7082453",
"0.6948145",
"0.687069",
"0.67227995",
"0.67099226",
"0.6656021",
"0.66164994",
"0.6600101",
"0.6528424",
"0.64959455",
"0.6495222",
"0.64876676",
"0.64606994",
"0.64593494",
"0.64276254",
"0.6426242",
"0.6380... | 0.59211814 | 55 |
Check if the pull request is intra_branch | def intra_branch?(pr)
q = <<-QUERY
select IF(base_repo_id = head_repo_id, true, false) as intra_branch
from pull_requests where id = ?
QUERY
db.fetch(q, pr[:id]).first[:intra_branch]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def intra_branch?(pr_id)\n q = <<-QUERY\n select IF(base_repo_id = head_repo_id, true, false) as intra_branch\n from pull_requests where id = ?\n QUERY\n if_empty(db.fetch(q, pr_id).all, :intra_branch)\n end",
"def pr_is_intra_branch(req)\n return false unless pr_has_head_repo(req)\n\n ... | [
"0.83154595",
"0.7904094",
"0.77689546",
"0.6816682",
"0.68065727",
"0.6737266",
"0.672144",
"0.66518724",
"0.6628448",
"0.6625893",
"0.6603046",
"0.65730083",
"0.65622723",
"0.65410274",
"0.6470436",
"0.6422021",
"0.6422021",
"0.6393189",
"0.63638103",
"0.6299155",
"0.629797... | 0.8336119 | 0 |
Check if the requester is part of the project's main team | def main_team_member?(pr, months_back)
(committer_team(pr, months_back) + merger_team(pr, months_back)).uniq.include? requester(pr)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_if_user_is_project_leader?\n if current_user.id != this_project.project_leader_id;\n render json: [\"Only the Project Lead can edit this project\"]\n end\n end",
"def allow_request_membership?(user = User.current_user)\n user.present? &&\n project_administrators.any? &&\n !... | [
"0.70613414",
"0.7020525",
"0.70115155",
"0.6988604",
"0.69573426",
"0.68982166",
"0.6797121",
"0.6781425",
"0.67618895",
"0.6685745",
"0.66769564",
"0.6643913",
"0.66271657",
"0.6617461",
"0.6612781",
"0.65984887",
"0.65775263",
"0.65655184",
"0.65389514",
"0.6535275",
"0.64... | 0.76905894 | 0 |
Various statistics for the pull request. Returned as Hash with the following keys: :lines_added, :lines_deleted, :files_added, :files_removed, :files_modified, :files_touched, :src_files, :doc_files, :other_files. | def pr_stats(pr)
pr_id = pr[:id]
raw_commits = commit_entries(pr_id)
result = Hash.new(0)
def file_count(commits, status)
commits.map do |c|
c['files'].reduce(Array.new) do |acc, y|
if y['status'] == status then acc << y['filename'] else acc end
end
end.flatten.uniq.size
end
def files_touched(commits)
commits.map do |c|
c['files'].map do |y|
y['filename']
end
end.flatten.uniq.size
end
def file_type(f)
lang = Linguist::Language.find_by_filename(f)
if lang.empty? then :data else lang[0].type end
end
def file_type_count(commits, type)
commits.map do |c|
c['files'].reduce(Array.new) do |acc, y|
if file_type(y['filename']) == type then acc << y['filename'] else acc end
end
end.flatten.uniq.size
end
def lines(commit, type, action)
commit['files'].select do |x|
next unless file_type(x['filename']) == :programming
case type
when :test
true if test_file_filter.call(x['filename'])
when :src
true unless test_file_filter.call(x['filename'])
else
false
end
end.reduce(0) do |acc, y|
diff_start = case action
when :added
"+"
when :deleted
"-"
end
acc += unless y['patch'].nil?
y['patch'].lines.select{|x| x.start_with?(diff_start)}.size
else
0
end
acc
end
end
raw_commits.each{ |x|
next if x.nil?
result[:lines_added] += lines(x, :src, :added)
result[:lines_deleted] += lines(x, :src, :deleted)
result[:test_lines_added] += lines(x, :test, :added)
result[:test_lines_deleted] += lines(x, :test, :deleted)
}
result[:files_added] += file_count(raw_commits, "added")
result[:files_removed] += file_count(raw_commits, "removed")
result[:files_modified] += file_count(raw_commits, "modified")
result[:files_touched] += files_touched(raw_commits)
result[:src_files] += file_type_count(raw_commits, :programming)
result[:doc_files] += file_type_count(raw_commits, :markup)
result[:other_files] += file_type_count(raw_commits, :data)
result
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pr_stats(pr_id)\n\n raw_commits = commit_entries(pr_id)\n result = Hash.new(0)\n\n def file_count(commits, status)\n commits.map do |c|\n c['files'].reduce(Array.new) do |acc, y|\n if y['status'] == status then acc << y['filename'] else acc end\n end\n end.flatten.uniq... | [
"0.73151946",
"0.6118038",
"0.60847896",
"0.60104066",
"0.5994874",
"0.59663105",
"0.5904327",
"0.5864714",
"0.5816958",
"0.57729346",
"0.5749894",
"0.5748126",
"0.5712928",
"0.57114",
"0.56568396",
"0.565159",
"0.5641704",
"0.55990905",
"0.5576634",
"0.5575222",
"0.54879695"... | 0.7263666 | 1 |
Return a hash of file names and commits on those files in the period between pull request open and months_back. The returned results do not include the commits comming from the PR. | def commits_on_pr_files(pr, months_back)
oldest = Time.at(Time.at(pr[:created_at]).to_i - 3600 * 24 * 30 * months_back)
pr_against = pull_req_entry(pr[:id])['base']['sha']
commits = commit_entries(pr[:id])
commits_per_file = commits.flat_map { |c|
c['files'].map { |f|
[c['sha'], f['filename']]
}
}.group_by {|c|
c[1]
}
commits_per_file.keys.reduce({}) do |acc, filename|
commits_in_pr = commits_per_file[filename].map{|x| x[0]}
walker = Rugged::Walker.new(repo)
walker.sorting(Rugged::SORT_DATE)
walker.push(pr_against)
commit_list = walker.take_while do |c|
c.time > oldest
end.reduce([]) do |acc1, c|
if c.diff(paths: [filename.to_s]).size > 0 and
not commits_in_pr.include? c.oid
acc1 << c.oid
end
acc1
end
acc.merge({filename => commit_list})
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def commits_on_pr_files(pr, months_back)\n\n oldest = Time.at(Time.at(pr[:created_at]).to_i - 3600 * 24 * 30 * months_back)\n commits = commit_entries(pr, at_open = true)\n\n commits_per_file = commits.flat_map { |c|\n unless c[:files].nil?\n JSON.parse(c[:files]).map { |f|\n [c[:sha]... | [
"0.781885",
"0.7199903",
"0.70200026",
"0.6888724",
"0.6694707",
"0.66847",
"0.6614441",
"0.651509",
"0.64553577",
"0.6392091",
"0.63737345",
"0.63438314",
"0.63361645",
"0.625318",
"0.62346965",
"0.616038",
"0.6153741",
"0.6114814",
"0.6090063",
"0.6048055",
"0.6045349",
"... | 0.7996276 | 0 |
Number of unique commits on the files changed by the pull request between the time the PR was created and `months_back` excluding those created by the PR | def commits_on_files_touched(pr, months_back)
commits_on_pr_files(pr, months_back).reduce([]) do |acc, commit_list|
acc + commit_list[1]
end.flatten.uniq.size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def commits_to_hottest_file(pr, months_back)\n a = commits_on_pr_files(pr, months_back).map{|x| x}.sort_by { |x| x[1].size}\n a.last[1].size\n end",
"def commits_last_x_months(pr, exclude_pull_req, months_back)\n q = <<-QUERY\n select count(c.id) as num_commits\n from projects p, commits c, proje... | [
"0.7478312",
"0.74050605",
"0.7130097",
"0.7068469",
"0.688963",
"0.6404193",
"0.6253267",
"0.6130436",
"0.6086234",
"0.602666",
"0.6025023",
"0.60062444",
"0.5991419",
"0.5978309",
"0.59181386",
"0.58950794",
"0.5843252",
"0.5819494",
"0.58091956",
"0.5800191",
"0.57967657",... | 0.8009828 | 0 |
Number of commits to the hottest file between the time the PR was created and `months_back` | def commits_to_hottest_file(pr, months_back)
a = commits_on_pr_files(pr, months_back).map{|x| x}.sort_by { |x| x[1].size}
a.last[1].size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def commits_last_x_months(pr, exclude_pull_req, months_back)\n q = <<-QUERY\n select count(c.id) as num_commits\n from projects p, commits c, project_commits pc, pull_requests pr,\n pull_request_history prh\n where p.id = pc.project_id\n and pc.commit_id = c.id\n and p.id = pr.base_re... | [
"0.7241653",
"0.7077607",
"0.69883245",
"0.6766744",
"0.6681246",
"0.62665546",
"0.6252305",
"0.6223784",
"0.6158122",
"0.61194336",
"0.60626364",
"0.60576135",
"0.6004553",
"0.598962",
"0.59739745",
"0.5972697",
"0.5885617",
"0.5883284",
"0.58514494",
"0.5824795",
"0.5816643... | 0.7847387 | 0 |
Total number of commits on the project in the period up to `months` before the pull request was opened. `exclude_pull_req` controls whether commits from pull requests should be accounted for. | def commits_last_x_months(pr, exclude_pull_req, months_back)
q = <<-QUERY
select count(c.id) as num_commits
from projects p, commits c, project_commits pc, pull_requests pr,
pull_request_history prh
where p.id = pc.project_id
and pc.commit_id = c.id
and p.id = pr.base_repo_id
and prh.pull_request_id = pr.id
and prh.action = 'opened'
and c.created_at < prh.created_at
and c.created_at > DATE_SUB(prh.created_at, INTERVAL #{months_back} MONTH)
and pr.id=?
QUERY
if exclude_pull_req
q << ' and not exists (select * from pull_request_commits prc1 where prc1.commit_id = c.id)'
end
db.fetch(q, pr[:id]).first[:num_commits]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def commits_last_x_months(pr_id, exclude_pull_req, months)\n q = <<-QUERY\n select count(c.id) as num_commits\n from projects p, commits c, project_commits pc, pull_requests pr,\n pull_request_history prh\n where p.id = pc.project_id\n and pc.commit_id = c.id\n and p.id = pr.base_repo... | [
"0.77199876",
"0.6266429",
"0.6101819",
"0.6030194",
"0.5894944",
"0.58628196",
"0.5827384",
"0.5808126",
"0.57765794",
"0.57693636",
"0.5710904",
"0.5699551",
"0.56647474",
"0.5644427",
"0.5577699",
"0.5550544",
"0.5528479",
"0.5516496",
"0.55154353",
"0.55106145",
"0.549379... | 0.72651494 | 1 |
JSON objects for the commits included in the pull request | def commit_entries(pr_id)
q = <<-QUERY
select c.sha as sha
from pull_requests pr, pull_request_commits prc, commits c
where pr.id = prc.pull_request_id
and prc.commit_id = c.id
and pr.id = ?
QUERY
commits = db.fetch(q, pr_id).all
commits.reduce([]){ |acc, x|
a = mongo['commits'].find_one({:sha => x[:sha]})
acc << a unless a.nil?
acc
}.select{|c| c['parents'].size <= 1}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def commits\n @commits ||= ApiFactory.new 'GitData::Commits'\n end",
"def pull_request_commits\n @octokit.pull_request_commits(@repository.full_name, @pull_request.number)\n end",
"def branch_commits\n\tcommits = {}\n\tresp = github_api(\"branches\")\n\tresp.each do |b|\n\t\t#puts b\n\t\tcommit_dat... | [
"0.73001933",
"0.72377795",
"0.7151705",
"0.7052108",
"0.70270604",
"0.69444156",
"0.68686634",
"0.6841529",
"0.6815554",
"0.6798025",
"0.6655812",
"0.65396124",
"0.6530324",
"0.6504571",
"0.6461776",
"0.6431479",
"0.6412393",
"0.6410855",
"0.63919294",
"0.6319299",
"0.631577... | 0.6408595 | 18 |
List of files in a project checkout. Filter is an optional binary function that takes a file entry and decides whether to include it in the result. | def files_at_commit(pr_id, filter = lambda{true})
q = <<-QUERY
select c.sha
from pull_requests p, commits c
where c.id = p.base_commit_id
and p.id = ?
QUERY
def lslr(tree, path = '')
all_files = []
for f in tree.map{|x| x}
f[:path] = path + '/' + f[:name]
if f[:type] == :tree
begin
all_files << lslr(repo.lookup(f[:oid]), f[:path])
rescue Exception => e
STDERR.puts e
all_files
end
else
all_files << f
end
end
all_files.flatten
end
base_commit = db.fetch(q, pr_id).all[0][:sha]
begin
files = lslr(repo.lookup(base_commit).tree)
files.select{|x| filter.call(x)}
rescue Exception => e
STDERR.puts "Cannot find commit #{base_commit} in base repo"
[]
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def filtered(files); end",
"def files &filter_block\n Dir[File.join(path, '**/*')].\n reject{|f| File.directory?(f) }.\n select{|f| f =~ filter_re }.\n sort.reverse[0..MAX_FILES].\n select(&filter_block)\n end",
"def files_filtering files\n ... | [
"0.65693074",
"0.6304924",
"0.6183348",
"0.61464906",
"0.59771967",
"0.59771967",
"0.59771967",
"0.58480406",
"0.58436716",
"0.5832988",
"0.57402414",
"0.57329553",
"0.573051",
"0.5715228",
"0.57029176",
"0.5689777",
"0.5669269",
"0.5664704",
"0.5633246",
"0.5575321",
"0.5544... | 0.6002325 | 4 |
Returns all comments for the issue sorted by creation date ascending | def issue_comments(owner, repo, pr_id)
Thread.current[:issue_id] ||= pr_id
if pr_id != Thread.current[:issue_id]
Thread.current[:issue_id] = pr_id
Thread.current[:issue_cmnt] = nil
end
Thread.current[:issue_cmnt] ||= Proc.new {
issue_comments = mongo['issue_comments']
ic = issue_comments.find(
{'owner' => owner, 'repo' => repo, 'issue_id' => pr_id.to_i},
{:fields => {'body' => 1, 'created_at' => 1, '_id' => 0},
:sort => {'created_at' => :asc}}
).map {|x| x}
}.call
Thread.current[:issue_cmnt]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_comments\n return Comment.where(design_problem_id: self.id).sort_by! { |x| x.created_at }.sort! { |a,b| b.created_at <=> a.created_at }\n end",
"def comments\n Comment.query({item_id: _id}, {sort: [[:created_at, -1]]})\n end",
"def comments\n if has_comments?\n @repository.load(:c... | [
"0.8048834",
"0.729173",
"0.7140348",
"0.7051767",
"0.7001586",
"0.6949368",
"0.69236344",
"0.68443245",
"0.6817531",
"0.6807687",
"0.67978317",
"0.66772115",
"0.6604261",
"0.6492646",
"0.6492646",
"0.6491285",
"0.64813614",
"0.6445192",
"0.64402235",
"0.64392227",
"0.6412062... | 0.6137324 | 31 |
Clone or update, if already cloned, a git repository | def clone(user, repo, update = false)
def spawn(cmd)
proc = IO.popen(cmd, 'r')
proc_out = Thread.new {
while !proc.eof
STDERR.puts "#{proc.gets}"
end
}
proc_out.join
end
checkout_dir = File.join('cache', user, repo)
begin
repo = Rugged::Repository.new(checkout_dir)
if update
spawn("cd #{checkout_dir} && git pull")
end
repo
rescue
spawn("git clone git://github.com/#{user}/#{repo}.git #{checkout_dir}")
Rugged::Repository.new(checkout_dir)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def git_clone\n\n Rugged::Repository.clone_at git_url, root_path.to_s if git_url\n end",
"def clone_repository\n if File.directory?(base_path)\n repo = ::Git.open(base_path)\n repo.checkout(\"master\")\n repo.pull\n repo.fetch\n else\n ::Git.clone(url, base_pa... | [
"0.7644012",
"0.7611376",
"0.75720984",
"0.75720984",
"0.75073874",
"0.7234737",
"0.71738327",
"0.7139327",
"0.7131093",
"0.7126073",
"0.7109488",
"0.70645773",
"0.7043592",
"0.7011788",
"0.69599336",
"0.69597834",
"0.6927984",
"0.692622",
"0.69216824",
"0.69213414",
"0.68929... | 0.7157811 | 7 |
[buff] is an array of file lines, with empty lines stripped [regexp] is a regexp or an array of regexps to match multiline comments | def count_multiline_comments(buff, regexp)
unless regexp.is_a?(Array) then regexp = [regexp] end
regexp.reduce(0) do |acc, regexp|
acc + buff.reduce(''){|acc,x| acc + x}.scan(regexp).map { |x|
x.map{|y| y.lines.count}.reduce(0){|acc,y| acc + y}
}.reduce(0){|acc, x| acc + x}
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def count_single_line_comments(buff, comment_regexp)\n a = buff.select { |l|\n not l.match(comment_regexp).nil?\n }.size\n a\n end",
"def count_single_line_comments(buff, comment_regexp)\n a = buff.select { |l|\n not l.match(comment_regexp).nil?\n }.size\n a\n end",
"def mark_comm... | [
"0.7220487",
"0.7220487",
"0.64149064",
"0.6391211",
"0.6391211",
"0.6391211",
"0.63559884",
"0.6342028",
"0.61176664",
"0.60395896",
"0.598893",
"0.59461075",
"0.5914054",
"0.5913262",
"0.58681345",
"0.5719369",
"0.571608",
"0.5704196",
"0.56952703",
"0.5685539",
"0.5657409"... | 0.7407011 | 1 |
[buff] is an array of file lines, with empty lines stripped | def count_single_line_comments(buff, comment_regexp)
a = buff.select { |l|
not l.match(comment_regexp).nil?
}.size
a
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clean_lines\n lines = @file_content.split(\"\\r\").select { |line| !line.match(/$\\s*#/)} unless @file_content.nil?\n end",
"def remove_blank_lines!(file_lines)\n raise ArgumentError.new(\"Input must not be nil\") if file_lines.nil?\n\n # delete_if method modifies original array\n ... | [
"0.66224545",
"0.6090969",
"0.60714406",
"0.60445267",
"0.60168856",
"0.6004003",
"0.5864593",
"0.5833146",
"0.5811307",
"0.57681733",
"0.5731601",
"0.56382996",
"0.56187075",
"0.5569803",
"0.5557382",
"0.5539056",
"0.55354196",
"0.5534495",
"0.55318904",
"0.5514759",
"0.5508... | 0.0 | -1 |
Return a function filename > Boolean, that determines whether a filename is a test file | def test_file_filter
raise Exception.new("Unimplemented")
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_file(path)\n return File.file?(path)\nend",
"def file? filepath\n self.system \"test -f #{filepath}\"\n end",
"def test_file?(path)\n @tests_files.include?(path)\n end",
"def file?() end",
"def validFile? filename\n if !filename.kind_of? String\n return false\n elsi... | [
"0.80507016",
"0.74835265",
"0.7364322",
"0.72650015",
"0.714615",
"0.7119923",
"0.7091479",
"0.70687366",
"0.69874424",
"0.6907231",
"0.6879069",
"0.6811271",
"0.67744863",
"0.6745622",
"0.6729327",
"0.66965127",
"0.66880065",
"0.6682768",
"0.66750973",
"0.6671297",
"0.66556... | 0.0 | -1 |
TODO: Just for beta testers and should not be visible for final user. | def show_coins_list
coins_list(coins)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def private; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def spec; end",
"def spec; end",
"def probers; end",
"def schubert; end",
"def refutal()\n end",
"def weber; end",
"def who_we_are\r\n end",
"def strategy; end",
"def implementation; end",
... | [
"0.7406174",
"0.6630762",
"0.6630762",
"0.6630762",
"0.6630762",
"0.6445482",
"0.6445482",
"0.6383004",
"0.63031936",
"0.6294341",
"0.6115796",
"0.60628986",
"0.60034996",
"0.5995726",
"0.5995726",
"0.5994637",
"0.5994637",
"0.5994637",
"0.5994637",
"0.5994637",
"0.5994637",
... | 0.0 | -1 |
Replace this with your real tests. | def test_truth
assert true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def testing\n # ...\n end",
"def __dummy_test__\n end",
"def tests; end",
"def tests; end",
"def spec; end",
"def spec; end",
"def self_test; end",
"def self_test; end",
"def test \n end",
"def test_0_dummy\n\t\tend",
"def test\n\n end",
"def test\n end",
"def test\n end"... | [
"0.7446459",
"0.6956364",
"0.69155836",
"0.69155836",
"0.6864151",
"0.6864151",
"0.66406286",
"0.66406286",
"0.66253287",
"0.6547665",
"0.6524571",
"0.6484549",
"0.6484549",
"0.6484549",
"0.6403847",
"0.6389188",
"0.6389188",
"0.6389188",
"0.6389188",
"0.6389188",
"0.6389188"... | 0.0 | -1 |
Recursive search through one route of the tree Here if it doesn't find a child in the dimension with the label we should move to its last dimension and try and find a child there with that label | def search(node_id, dim, value, position)
Base.log("Entered search", "from node #{node_id}, looking for #{value} in #{dim} at position #{position}")
dimensions = order(tree.nodes.dimensions(node_id))
# if node has a child or link pointing
# to dim labeled value return it
if dimensions.include?(dim)
if child = tree.nodes.child(node_id, dim, value)
Base.log("Found dimension with child", "#{child}:#{value} at #{dim}")
return child
else
Base.log("Found dimension without child", "#{value} at #{dim}")
end
else
Base.log("Didn't find an edge", "#{dim} from node #{node_id}")
end
return nil if dimensions.empty?
last_name = tree.nodes.children(node_id, dimensions.last).last
last_node = tree.nodes.child(node_id, dimensions.last, last_name)
if last_node.nil?
Base.log("Didn't find any child nodes of the last dimension", "#{dimensions.last} of node #{node_id}")
return last_node
else
Base.log("Recursively searching", "#{last_name}")
return search(last_node, dim, value, position)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def DFS(root, target)\n ## base case: \n return nil if root.nil?\n return root if root.value == target\n ##indecutive step: \n ## DFS on the left side then DFS on the right side \n root.children.each do |child|\n search_result = DFS(child, target) ## better to save the actual value then ch... | [
"0.61833626",
"0.6084682",
"0.60382634",
"0.5937286",
"0.5917319",
"0.5900532",
"0.5836013",
"0.576834",
"0.57447493",
"0.5736283",
"0.5727365",
"0.57259536",
"0.56953925",
"0.5666499",
"0.5661071",
"0.5659128",
"0.56366515",
"0.56347597",
"0.56289",
"0.5609312",
"0.55901825"... | 0.563477 | 17 |
Here we continue to select the last node out of the last dimension until we reach a value | def search_measures(node_id, measures)
Base.log("Entered search measures", "#{node_id}, #{measures}")
if tree.nodes.measures(node_id).any?
Base.log("Found measures in #{node_id}")
values = {}
measures.each do |selected|
values[selected] = tree.nodes.measure(node_id, selected)
end
return values
else
Base.log("Didn't find any measures in #{node_id}")
dimensions = order(tree.nodes.dimensions(node_id))
Base.log("node #{node_id} has dimensions #{dimensions.inspect}")
if dimensions.nil?
Base.log("Didn't find any dimension from #{node_id}")
return nil
else
next_name = tree.nodes.children(node_id, dimensions.last).last
next_node = tree.nodes.child(node_id, dimensions.last, next_name)
if next_node.nil?
return next_node
else
Base.log("Recursing to search for measures in #{next_node}")
return search_measures(next_node, measures)
end
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def kth_to_last(node, k)\n return nil if node.value == nil\n\n fast_cursor = node\n slow_cursor = node\n\n k.times { fast_cursor = fast_cursor.next }\n\n until fast_cursor.nil?\n fast_cursor = fast_cursor.next\n slow_cursor = slow_cursor.next\n end\n\n slow_cursor.value\nend",
... | [
"0.63042134",
"0.6261147",
"0.6081858",
"0.5894412",
"0.5833185",
"0.57765836",
"0.5755087",
"0.57455486",
"0.57347625",
"0.5709944",
"0.56815904",
"0.56783813",
"0.5678262",
"0.56441563",
"0.56395715",
"0.56286246",
"0.5612148",
"0.5612126",
"0.5599876",
"0.55899346",
"0.556... | 0.0 | -1 |
Returns the last dimension for which this query has a value other than an implied '' specified | def last_specified_position
tree.dimensions.length.downto(1) do |index|
return index - 1 if conditions[tree.dimensions[index - 1]] != '*'
end
return -1
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_unlimited_dimension\n NetCDF::Dimension.new(@netcdf_elmt.getUnlimitedDimension())\n end",
"def unlimited_dimension?\n @netcdf_elmt.hasUnlimitedDimension() \n end",
"def get_axis(dimension)\n nil\n end",
"def dimensionality\n return nil if self.points.count == 0\n ... | [
"0.5855188",
"0.576615",
"0.5765505",
"0.55257887",
"0.54617494",
"0.5295059",
"0.5183735",
"0.51833045",
"0.5149452",
"0.51178247",
"0.51134056",
"0.50946605",
"0.5089972",
"0.5089297",
"0.50776696",
"0.50757456",
"0.5042183",
"0.5042183",
"0.5042183",
"0.5030667",
"0.502517... | 0.60016376 | 0 |
Prevents the child instance from being created without a parent instance. Reloads the object to avoid having the attributes hash pick up the extra columns when the child is saved for the first time. | def create
if self.send(self.class.parent_association_name).valid?
self.send(self.class.parent_association_name).save!
self.id = self.send(self.class.parent_association_name).id
self.instance_variable_set(:@new_record, false)
ret = self.save
self.reload
return ret
else
self.errors.add(self.class.parent_association_name, 'has errors')
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def child_attributes\n super.merge(\n schema: ->(t) { t.references :child, foreign_key: true },\n model: ->(m) {\n belongs_to :child, inverse_of: :parent, dependent: :destroy\n has_one :parent, inverse_of: :child, class_name: self.name\n },\n viewmodel: ->(_v) {\n... | [
"0.71397877",
"0.62769413",
"0.6215587",
"0.62116843",
"0.617348",
"0.6122293",
"0.60706776",
"0.60380137",
"0.60223645",
"0.60101056",
"0.5965975",
"0.5940833",
"0.5922641",
"0.5919059",
"0.59122634",
"0.5894067",
"0.58906996",
"0.5851186",
"0.5836096",
"0.5821086",
"0.58070... | 0.60835695 | 6 |
Limits the columns for Child to primary_key and child__ columns | def columns
unless defined?(@columns) && @columns
@columns = connection.columns(table_name, "#{name} Columns").select do |column|
column.name =~ Regexp.new("^#{self.to_s.underscore}__") || column.name == primary_key
end
@columns.each { |column| column.primary = column.name == primary_key }
end
@columns
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def child_attributes\n super.merge(\n schema: ->(t) { t.references :child, foreign_key: true },\n model: ->(m) {\n belongs_to :child, inverse_of: :parent, dependent: :destroy\n has_one :parent, inverse_of: :child, class_name: self.name\n },\n viewmodel: ->(_v) {\n... | [
"0.6927442",
"0.68683344",
"0.6360025",
"0.6234488",
"0.59825134",
"0.5873888",
"0.5861839",
"0.584672",
"0.5826241",
"0.58216697",
"0.58084595",
"0.5797248",
"0.57721233",
"0.5726277",
"0.570092",
"0.5678889",
"0.5659672",
"0.56163746",
"0.5608843",
"0.56052244",
"0.5600328"... | 0.62832314 | 3 |
Aliases dynamic attribute methods to remove 'class__' from method name | def define_attribute_methods
super
self.generated_methods.each do |method|
if method.to_s =~ Regexp.new("^#{self.to_s.underscore}__")
new_method_name = $~.post_match
alias_method(new_method_name, method)
private method
self.generated_methods << new_method_name
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def define_attribute_methods(attr)\n name = attr.name\n return if @attr_methods.has_key?(name)\n ([name] + attr.aliases).each do |ali|\n @attr_methods[ali] = name\n @attr_aliases[Inflector.underscore(ali)] = name\n @normalized_attr_names[normalize_attribute_name(ali)] = name\n ... | [
"0.6608754",
"0.6562694",
"0.6562694",
"0.64511454",
"0.6426412",
"0.64023167",
"0.6389525",
"0.6326438",
"0.6307987",
"0.63063806",
"0.6258632",
"0.62415063",
"0.61985856",
"0.61607707",
"0.6160028",
"0.61561626",
"0.61505026",
"0.6149396",
"0.6099546",
"0.6093995",
"0.60880... | 0.64180315 | 5 |
NOTE DM Adapter dependent DM creates MySQL tables case insensitive by default | def find_by_credentials(credentials)
credential = credentials[0].dup
credential.downcase! if @klass.sorcery_config.downcase_username_before_authenticating
@klass.sorcery_config.username_attribute_names.each do |name|
@user = @klass.first(name => credential)
break if @user
end
!!@user ? @klass.get(@user.id) : nil
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def db_table\n name = (\"evaldata_\" + term.title + \"_\" + title).strip\n name = ActiveSupport::Inflector.transliterate(name).downcase\n name.gsub(/[^a-z0-9_]+/, \"_\")\n end",
"def table_name_prefix; end",
"def create_table_statement(model)\n \"#{super} ENGINE = InnoDB CHARACTER SET #{char... | [
"0.65100163",
"0.6307187",
"0.6196754",
"0.6135074",
"0.60585356",
"0.6047761",
"0.6029418",
"0.5868553",
"0.57940096",
"0.57815707",
"0.5777005",
"0.5770877",
"0.57469016",
"0.57320887",
"0.57185316",
"0.5710127",
"0.57057565",
"0.56791747",
"0.5666355",
"0.5645109",
"0.5630... | 0.0 | -1 |
NOTE DM Adapter dependent | def get_current_users
unless @klass.repository.adapter.is_a?(::DataMapper::Adapters::MysqlAdapter)
raise 'Unsupported DataMapper Adapter'
end
config = @klass.sorcery_config
ret = @klass.all(config.last_logout_at_attribute_name => nil) |
@klass.all(config.last_activity_at_attribute_name.gt => config.last_logout_at_attribute_name)
ret = ret.all(config.last_activity_at_attribute_name.not => nil)
ret = ret.all(config.last_activity_at_attribute_name.gt => config.activity_timeout.seconds.ago.utc)
ret
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_adapter_specific_setup; end",
"def adapter_initialize\n end",
"def initialize(adapter); end",
"def default_adapter=(adapter); end",
"def default_adapter=(adapter); end",
"def default_adapter=(adapter); end",
"def adapter\n @adapter\n end",
"def bi_service\n end",
"def adapter\n ... | [
"0.6574465",
"0.6149335",
"0.6115737",
"0.5958607",
"0.5958607",
"0.5958607",
"0.58520186",
"0.57970285",
"0.5745512",
"0.57136744",
"0.57136744",
"0.56665456",
"0.56665456",
"0.56665456",
"0.5641203",
"0.56239253",
"0.5615376",
"0.5615376",
"0.5563482",
"0.55569077",
"0.5539... | 0.0 | -1 |
GET /songs GET /songs.json | def index
if params[:q].present?
@songs = Song.search(params[:q])
else
@songs = Song.all
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def songs\n \t@songs = Album.find(params[:id])\n songs = RSpotify::Track.search(@songs.name)\n \t@songs = songs.map do |s_songs|\n \tSong.new_from_spotify_song(s_songs)\n end\n render json: {data:@songs}\n end",
"def index\n \n @songs = Song.order 'id'\n\n respond_to do |format|\n f... | [
"0.80239767",
"0.7723038",
"0.76394385",
"0.75777835",
"0.75201935",
"0.7517737",
"0.74526227",
"0.74322164",
"0.7406643",
"0.7387381",
"0.71902865",
"0.7173646",
"0.7173646",
"0.7173646",
"0.7173646",
"0.71091557",
"0.70978016",
"0.7096731",
"0.7045539",
"0.7045539",
"0.7045... | 0.69005704 | 49 |
def show end GET /songs/new | def new
@song = Song.new
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @song = @playlist.songs.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @song }\n end\n end",
"def new\n @song = Song.new\n\n respond_to do |format|\n format.html # new.html.erb\n end\n end",
"def new\n @song = Song.new\n\... | [
"0.8389444",
"0.83243036",
"0.8316805",
"0.8316805",
"0.8316805",
"0.8316805",
"0.82980275",
"0.82980275",
"0.8088837",
"0.8054444",
"0.8054444",
"0.8054444",
"0.8054444",
"0.8054444",
"0.8054444",
"0.79302746",
"0.78313476",
"0.7775184",
"0.7775184",
"0.7775184",
"0.7725575"... | 0.81266946 | 11 |
Use callbacks to share common setup or constraints between actions. | def set_song
@song = Song.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_... | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576"... | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def song_params
params.require(:song).permit(:number, :title, :composer, :sopran, :alt, :tenor, :bass, :canon)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n... | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",... | 0.0 | -1 |
use to modify hexes based on optional rules | def optional_hexes
game_hexes
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hex() end",
"def set_hex(clean_hex_str)\n @red = clean_hex_str[0, 2].to_i(16)\n @green = clean_hex_str[2, 2].to_i(16)\n @blue = clean_hex_str[4, 2].to_i(16)\n end",
"def hex!\n @hex = true\n end",
"def hexadecimal!\n # -> uncomment the next line to manually enable rule ... | [
"0.6358642",
"0.62328464",
"0.6086142",
"0.6037317",
"0.6028119",
"0.6028119",
"0.6015737",
"0.5791498",
"0.5701557",
"0.56692934",
"0.56466585",
"0.5612995",
"0.5605454",
"0.5535129",
"0.55080694",
"0.5414474",
"0.5408341",
"0.5371297",
"0.53646934",
"0.53610164",
"0.5360703... | 0.6083298 | 3 |
use to modify location names based on optional rules | def location_name(coord)
self.class::LOCATION_NAMES[coord]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_update_location_name_errors\n params = update_params_from_loc(locations(:albion))\n params[:location][:display_name] = \"Somewhere Dubious\"\n update_location_error(params)\n end",
"def location_name=(loc_attribute)\n # byebug\n if !loc_attribute.blank?\n self.location = Location.fin... | [
"0.6250166",
"0.5905971",
"0.58514583",
"0.5824623",
"0.5824623",
"0.5824623",
"0.5800147",
"0.5747493",
"0.56771225",
"0.56649095",
"0.56343555",
"0.56087255",
"0.5556413",
"0.5503434",
"0.5494735",
"0.5484667",
"0.5467317",
"0.544179",
"0.5425922",
"0.54225",
"0.54060763",
... | 0.0 | -1 |
use to modify tiles based on optional rules | def optional_tiles; end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_tiles\n if @game_state_model::game_type == :classic\n @tiles = @tilesClassic\n elsif @game_state_model::game_type == :otto\n @tiles = @tilesOtto\n end\n end",
"def apply_tileset(tilemap, ts)\n tilemap.tileset = RPG::Cache.tileset(ts.tileset_name)\n ts.autotile_names.each_w... | [
"0.6417361",
"0.61875075",
"0.6048957",
"0.58328784",
"0.57986164",
"0.57176316",
"0.562217",
"0.55441266",
"0.55360913",
"0.5528822",
"0.55224943",
"0.5481565",
"0.5453549",
"0.54517937",
"0.5449393",
"0.5438766",
"0.54305965",
"0.54246384",
"0.5423301",
"0.54219514",
"0.542... | 0.68982786 | 0 |
Initialize actions respecting the undo state | def initialize_actions(actions, at_action: nil)
@loading = true unless @strict
filtered_actions, active_undos = self.class.filtered_actions(actions)
# Store all actions for history navigation
@raw_all_actions = actions
filtered_actions.each.with_index { |action, index| @raw_all_actions[index]['skip'] = true unless action }
@undo_possible = false
# replay all actions with a copy
filtered_actions.each.with_index do |action, index|
next if @exception
break if at_action && action && action['id'] > at_action
if action
action = action.copy(self) if action.is_a?(Action::Base)
process_action(action)
else
# Restore the original action to the list to ensure action ids remain consistent but don't apply them
@raw_actions << actions[index]
end
end
@redo_possible = active_undos.any?
@loading = false
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize_actions(actions)\n @loading = true unless @strict\n\n filtered_actions, active_undos = self.class.filtered_actions(actions)\n @undo_possible = false\n # replay all actions with a copy\n filtered_actions.each.with_index do |action, index|\n if !action.nil?\... | [
"0.7333719",
"0.67901605",
"0.67657703",
"0.6645486",
"0.66137093",
"0.6516644",
"0.64951825",
"0.61542124",
"0.61542124",
"0.61445355",
"0.60968244",
"0.60396206",
"0.5978256",
"0.58815444",
"0.58815444",
"0.58815444",
"0.58815444",
"0.5868914",
"0.58566856",
"0.58416885",
"... | 0.69653577 | 1 |
Override this if a game has a licensing mechanic for corporations and trains See 1862 for an example | def able_to_operate?(_entity, _train, _name)
true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def licensing?\n @config[:licensing].present? and licensing.present?\n end",
"def has_license?\n !license.nil?\n end",
"def licensed?\n end",
"def northern_irish_driving_licence; end",
"def license_allows_download?(document)\n document[:license_ss] =~ /(Creative Commons|No known r... | [
"0.6560351",
"0.6552202",
"0.6529222",
"0.6247799",
"0.61493856",
"0.5975425",
"0.5953826",
"0.5939044",
"0.5920886",
"0.5897161",
"0.58722097",
"0.58517766",
"0.58342505",
"0.58165836",
"0.5747795",
"0.5731325",
"0.56645465",
"0.56445944",
"0.562842",
"0.5617983",
"0.561715"... | 0.0 | -1 |
Before rusting, check if this train individual should rust. | def rust?(train, purchased_train)
train.rusts_on == purchased_train.sym ||
(train.obsolete_on == purchased_train.sym && @depot.discarded.include?(train))
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def precheck\n end",
"def training_required?\n return training_required\n end",
"def check_train_wagons\n passenger_trains_amount = 0\n cargo_amount = 0\n @trains.each_value do |train|\n train.type == 'cargo' ? cargo_amount += 1 : passenger_trains_amount += 1\n end\n passenger_matche... | [
"0.6226419",
"0.597877",
"0.5849057",
"0.5763147",
"0.57623893",
"0.57109785",
"0.57100093",
"0.5645068",
"0.55881447",
"0.5569135",
"0.5540164",
"0.55222774",
"0.5515986",
"0.547032",
"0.5467444",
"0.5466887",
"0.54613644",
"0.5457392",
"0.5446979",
"0.544003",
"0.5429832",
... | 0.627899 | 0 |
Called by Engine::Step::BuyCompany to determine if the company's owner is even allowed to sell the company | def company_sellable(company)
!company.owner.is_a?(Corporation)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def may_purchase?(_company)\n false\n end",
"def allowed_to_pay_hbrand_fee?(company)\n admin? || in_company?(company) #|| has_approved_app_for_company?(company)\n end",
"def can_auction?(_company)\n true\n end",
"def check_buyable\n raise PermissionDeniedError, ... | [
"0.748362",
"0.7400567",
"0.7384144",
"0.7213982",
"0.69202995",
"0.69045514",
"0.68470055",
"0.6750493",
"0.6697506",
"0.66418934",
"0.662486",
"0.6598325",
"0.65425855",
"0.6506366",
"0.64560515",
"0.6454004",
"0.6446892",
"0.64221716",
"0.64179957",
"0.6400574",
"0.6379660... | 0.6971757 | 4 |
Called by View::Game::Entities to determine if the company should be shown on entities | def unowned_purchasable_companies(_entity)
[]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def company_sellable(company)\n !company.owner.is_a?(Corporation)\n end",
"def show?\n return true if user.admin? || is_in_company?(record)\n\n record.information_complete? ? true : false\n end",
"def has_company?\n if self.group && self.group.company\n true\n else\n false\n ... | [
"0.6900819",
"0.6585542",
"0.6523663",
"0.65072274",
"0.6440897",
"0.64272255",
"0.6339001",
"0.62563354",
"0.6185978",
"0.6149136",
"0.61482036",
"0.6138704",
"0.6095491",
"0.60709774",
"0.60664165",
"0.6061783",
"0.60610527",
"0.60401773",
"0.6034877",
"0.60239846",
"0.6013... | 0.0 | -1 |
This is a hook to allow game specific logic to be invoked after a company is bought | def company_bought(company, buyer); end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def after_redeem() end",
"def after_purchase(order)\n self.consumed = true\n # send confirmation email to admin and to user, #person.send_email already delays\n notify_admins\n notify_backer\n save!\n\n MixpanelEvent.track(\n person_id: person_id,\n event: 'Create Order',\n check... | [
"0.6699034",
"0.6321153",
"0.6293613",
"0.61444855",
"0.6143001",
"0.6131808",
"0.6104428",
"0.5998178",
"0.5997729",
"0.5953688",
"0.5857501",
"0.5855254",
"0.5827374",
"0.58120406",
"0.58070296",
"0.57128084",
"0.568173",
"0.56773466",
"0.56546104",
"0.56546104",
"0.5638232... | 0.71550274 | 0 |
price is nil, :free, or a positive int | def buy_train(operator, train, price = nil)
operator.spend(price || train.price, train.owner) if price != :free
remove_train(train)
train.owner = operator
operator.trains << train
@crowded_corps = nil
close_companies_on_event!(operator, 'bought_train')
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def free?\n price.blank? || price.zero?\n end",
"def check_price\n end",
"def free?\n cost == 0.0 || payment_not_required\n end",
"def provides_price?\n false\n end",
"def price_validate\n\t\tif (self.pay_mode == \"free\") || (self.pay_mode == \"FREE\") || (self.pay_mode == \"Free\")\n\t... | [
"0.814624",
"0.7294945",
"0.7266531",
"0.7136334",
"0.7132858",
"0.69587946",
"0.6848514",
"0.68371373",
"0.6829545",
"0.6778875",
"0.6760595",
"0.6754013",
"0.6746656",
"0.6692238",
"0.66800207",
"0.66584367",
"0.66484255",
"0.66429615",
"0.66381145",
"0.6625133",
"0.6584813... | 0.0 | -1 |
Override this, and add elements (paragraphs of text) here to display it on Info page. | def timeline
[]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def notes\n super()\n\n section = __method__\n text = \"\"\n html = \"\"\n\n frontend_url = generate_frontend_url\n if frontend_url\n text += \"Frontend URL: #{frontend_url}\\n\\n\"\n add_short_text(\"additional_info\", \"View logs here: #... | [
"0.71658826",
"0.64702886",
"0.645451",
"0.644753",
"0.6378201",
"0.6350963",
"0.62231123",
"0.6143017",
"0.6036384",
"0.60167783",
"0.6009391",
"0.59889",
"0.5919463",
"0.59189594",
"0.5906192",
"0.58695203",
"0.585094",
"0.58284456",
"0.582189",
"0.5806899",
"0.5797181",
... | 0.0 | -1 |
minors to show on player cards | def player_card_minors(_player)
[]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def showHands\n @players.times do |p|\n i=p + 1\n print \"\\nPlayer #{i}:\"\n # DJBHERE DJB HERE str = \"file_\" + i.to_s.rjust(n, \"0\")\n @playersCards[i].each do |c|\n print \" #{c.visible}\"\n end\n end\n end",
"def show_cards_of player\r\n if player == nil\r\n retu... | [
"0.6928605",
"0.6826488",
"0.65146625",
"0.6418183",
"0.6347353",
"0.630805",
"0.6192771",
"0.6187515",
"0.61605054",
"0.61089545",
"0.6089735",
"0.60895765",
"0.60695255",
"0.60596746",
"0.6013748",
"0.6012144",
"0.60065943",
"0.5998221",
"0.5979819",
"0.59753776",
"0.596049... | 0.72604084 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.