query stringlengths 7 9.55k | document stringlengths 10 363k | metadata dict | negatives listlengths 0 101 | negative_scores listlengths 0 101 | document_score stringlengths 3 10 | document_rank stringclasses 102
values |
|---|---|---|---|---|---|---|
used by api_all_posts/find_post for getting the next/prev post | def next
self.class.where("id > ?", id).first
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def next_post\n @next_post ||= self.class.only(:title, :slug).where(:post_date.gt => self.post_date).ascending(:post_date).first\n end",
"def next_post\n self.class.first(:conditions => [\"id > ?\", id], :order => \"id asc\")\n end",
"def find_next_and_prev(*args)\n return find(:next, *arg... | [
"0.7501812",
"0.7237172",
"0.6990505",
"0.69771564",
"0.6958815",
"0.6953897",
"0.6939591",
"0.686992",
"0.6819787",
"0.67346966",
"0.6687774",
"0.66652364",
"0.65822643",
"0.65061253",
"0.64835536",
"0.6462869",
"0.64536405",
"0.64312744",
"0.6392861",
"0.6391852",
"0.631056... | 0.5881885 | 67 |
Add provenance information to a Vivo profile derived from a CAP profile. | def prov_profile(rdf, vivo_uri, cap_uri, cap_modified)
prov_mapping # create most of the PROV once
cap_modified = time_modified(cap_modified)
vivo_modified = time_modified
rdf << [vivo_uri, RDF.type, RDF::PROV.Entity]
rdf << [cap_uri, RDF.type, RDF::PROV.Entity]
rdf << [cap_uri, RDF::PROV.generatedAtTime, cap_modified]
rdf << [vivo_uri, RDF::PROV.wasDerivedFrom, cap_uri]
rdf << [vivo_uri, RDF::PROV.wasGeneratedBy, MAPPING_ACTIVITY]
rdf << [vivo_uri, RDF::PROV.generatedAtTime, vivo_modified]
rdf << [MAPPING_ACTIVITY, RDF::PROV.used, cap_uri]
rdf
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_provenance(opts = {})\n if opts[:from] == opts[:to] # special case: provenance between two parts on the same collection\n opts[:from_map] = opts[:to_map] # ensure from map and to map are the same object for this case\n end\n\n # creating information hashes to represent `from` and `to` relatio... | [
"0.5688412",
"0.5663554",
"0.56164765",
"0.5577654",
"0.55572546",
"0.5395352",
"0.5377717",
"0.5325217",
"0.53162766",
"0.5259557",
"0.5234151",
"0.5216931",
"0.51850855",
"0.5147632",
"0.51038295",
"0.50917876",
"0.5091658",
"0.5089185",
"0.5076521",
"0.50658333",
"0.504702... | 0.6687664 | 0 |
Save the PROV mapping activity and associated agent data to the triple store once. | def prov_mapping
@@prov_mapping ||= begin
g = RDF::Graph.new
g << [MAPPING_ENTITY, RDF.type, RDF::PROV.Entity]
g << [MAPPING_ACTIVITY, RDF.type, RDF::PROV.Activity]
g << [MAPPING_ACTIVITY, RDF::PROV.wasAssociatedWith, MAPPING_AGENT]
g << [MAPPING_AGENT, RDF.type, RDF::PROV.Agent]
g << [MAPPING_AGENT, RDF.type, RDF::PROV.Person]
g << [MAPPING_AGENT, RDF::FOAF.name, MAPPING_AGENT_NAME]
g << [MAPPING_AGENT, RDF::PROV.actedOnBehalfOf, MAPPING_ORG]
g << [MAPPING_ORG, RDF.type, RDF::PROV.Agent]
g << [MAPPING_ORG, RDF.type, RDF::PROV.Organization]
g << [MAPPING_ORG, RDF::FOAF.name, MAPPING_ORG_NAME]
if Cap::Vivo.configuration.rdf_prov
g.each_statement do |s|
Cap::Vivo.configuration.rdf_repo.insert_statement s
end
path = Cap::Vivo.configuration.rdf_path
f = File.open(File.join(path, "prov.ttl"), 'w')
f.write g.to_ttl
f.close
end
true
rescue => e
Cap::Vivo.configuration.logger.error e.message
false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def persist!\n @snapshot.save!\n @processes.each(&:save!)\n persist_labels\n persist_metrics\n end",
"def save\n das = []\n @map.each_key do |key|\n if key == DATAMATRIX_KEY\n das.concat save_data_matrix_alt(@object, @map[key])\n elsif @map[key].is_a? UploadAssoc\n ... | [
"0.6197693",
"0.55192554",
"0.5396934",
"0.5390301",
"0.5384215",
"0.5380797",
"0.5317936",
"0.53015876",
"0.5296394",
"0.52740514",
"0.5265661",
"0.5254812",
"0.5249568",
"0.5227665",
"0.51633376",
"0.5114509",
"0.510434",
"0.5091118",
"0.5077552",
"0.5066854",
"0.50525016",... | 0.60102767 | 1 |
Generate or parse a datetime | def time_modified(t=nil)
if t.nil?
t = DateTime.now.new_offset(0)
elsif t.is_a? String
t = DateTime.parse(t).new_offset(0)
elsif t.is_a? Time
t = t.utc
elsif t.is_a? Date
t = t.to_datetime.new_offset(0)
elsif t.is_a? DateTime
t = t.new_offset(0)
end
RDF::Literal.new(t, :datatype => RDF::XSD.dateTime)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse_datetime(str)\n parts = str.scan(/.{2}/)\n year = Time.now.year.to_s[0..1] + parts[0]\n month = parts[1]\n day = parts[2]\n hour = parts[3]\n minute = parts[3]\n \"#{year}-#{month}-#{day} #{hour}:#{minute}\"\n end",
"def post_process_datetime( val )\n\t\t\treturn T... | [
"0.7070377",
"0.69299585",
"0.6742204",
"0.6651231",
"0.6609041",
"0.6601421",
"0.6598809",
"0.65349627",
"0.6492359",
"0.6484031",
"0.64280665",
"0.63961625",
"0.6391698",
"0.6365738",
"0.6361959",
"0.6351365",
"0.63331765",
"0.62859774",
"0.62728083",
"0.62657666",
"0.62652... | 0.0 | -1 |
Resets the size of the +weights+ array. If the size is greater, adds zero elements until size is met. Othewise, removes weights from +weights+. | def size=(size)
dif = size - @size
dif < 0 ? @weights.pop(-dif) : @weights.push(Array.new(dif, 0)).flatten!
dif < 0 ? @inputs.pop(-dif) : @inputs.push(Array.new(dif, 0)).flatten!
@size = size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_all()\n @field_weights = Array.new\n end",
"def SetWeights(weights)\n assert { weights.instance_of? Array }\n weights.each do |weight|\n assert { weight.instance_of? Fixnum }\n end\n\n @weights = weights\n end",
"def mutate\n @weights = (1..@size).map { Rando... | [
"0.6416733",
"0.6205409",
"0.5984365",
"0.59631985",
"0.585931",
"0.5787518",
"0.57487696",
"0.57204455",
"0.5692928",
"0.5599806",
"0.5367201",
"0.53049046",
"0.5292827",
"0.52518374",
"0.51847726",
"0.51425505",
"0.5142335",
"0.51288176",
"0.51170987",
"0.5088598",
"0.50417... | 0.7093326 | 0 |
Randomly assigns weights to the +weights+ array. | def mutate
@weights = (1..@size).map { Random.rand(-0.5..0.5) }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def SetWeights(weights)\n assert { weights.instance_of? Array }\n weights.each do |weight|\n assert { weight.instance_of? Fixnum }\n end\n\n @weights = weights\n end",
"def randomize(weights=nil)\n return randomize(map {|n| n.send(weights)}) if weights.is_a? Symbol\n\n weights... | [
"0.7380053",
"0.7277428",
"0.72720736",
"0.7137935",
"0.71321714",
"0.70666707",
"0.6909901",
"0.6757212",
"0.6592405",
"0.6482456",
"0.64744115",
"0.64625245",
"0.6398976",
"0.6303865",
"0.6303865",
"0.61887765",
"0.6079592",
"0.6035934",
"0.60287535",
"0.5992689",
"0.594848... | 0.77612984 | 0 |
Return the weighted sum of inputs. Assumes inputs are contained in the neuron if not provided. | def weightedsum
ws = 0
@inputs.each_index { |i| ws = ws + @inputs[i]*@weights[i] }
ws
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def feedforward(inputs)\n # Sum all values\n sum = @weights.zip(inputs).map { |a, b| a * b }.inject(0, :+)\n # Result is sign of the sum, -1 or 1\n activate(sum)\n end",
"def calculate_weighted_sum(base, weight_factors); end",
"def total_input\n @inputs.inject(:+)\n end",
"def sums_all... | [
"0.71419394",
"0.68680716",
"0.66729486",
"0.6493768",
"0.63657105",
"0.62737894",
"0.6232758",
"0.6125492",
"0.6092445",
"0.6060865",
"0.6060409",
"0.6022857",
"0.6001466",
"0.5982035",
"0.5930714",
"0.5910342",
"0.58885425",
"0.588458",
"0.58714414",
"0.58582443",
"0.585796... | 0.84757197 | 0 |
Apply the sigmoid acivation function and return the result. | def activate
@output = (1.0 / (1.0 + Math.exp(-1 * self.weightedsum - @bias)))
@output
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sigmoid x\n 1.0 / (1 + Math::E** -x)\n end",
"def d_sigmoid(x)\n sigmoid(x) * (1 - sigmoid(x))\n end",
"def sigmoidal(activation_value)\n\tneuron_value=1/(1 + 2.71828 ** activation_value)\n\treturn neuron_value\nend",
"def sigmoid x\n x.map {|el| 0.5 * (1 + Math.tanh(el/@epsilon)) }\n end",
... | [
"0.7423981",
"0.7369767",
"0.724298",
"0.7165206",
"0.7158196",
"0.6555789",
"0.643548",
"0.6356133",
"0.6079093",
"0.60557675",
"0.5820198",
"0.5654549",
"0.55422103",
"0.55422103",
"0.5365704",
"0.53624296",
"0.529572",
"0.52290505",
"0.51681274",
"0.51238406",
"0.5107453",... | 0.6368261 | 7 |
Determine +error+ and +gradient+. Value is used exactly if exact is true in a hash, otherwise the value is compared against +output+. | def error=(e)
if e.is_a?(Hash)
value = e[:value]
exact = e[:exact]
else
value = e
exact = true
end
@error = exact ? value : (value - @output)
@gradient = @output * (1 - @output) * @error
@error
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def error\n 1.0 - accuracy()\n end",
"def hash_function_and_value\n return if hash_value.nil?\n if hash_function.nil? then\n return if errors.include?( :hash_value )\n errors.add( :hash_value, I18n.t( 'cfr_records.msg.hash_wo_fct' ))\n else\n case hash_function\n when 0 # MD5... | [
"0.53865325",
"0.5267203",
"0.51096433",
"0.5085192",
"0.49521282",
"0.4945322",
"0.49433422",
"0.48458257",
"0.47774798",
"0.4777257",
"0.47115555",
"0.47073373",
"0.4655731",
"0.4654314",
"0.46430433",
"0.4608835",
"0.45964983",
"0.4573721",
"0.45713344",
"0.45622984",
"0.4... | 0.6619644 | 0 |
Alters the +weights+ array according to +rate+ and +gradient+. | def train
@weights.each_index do |i|
# alter weight and apply momentum
@weights[i] = @weights[i] + (@rate * inputs[i] * @gradient)
@weights[i] = @weights[i] + @momentum * @lastchange
@lastchange = @rate * inputs[i] * @gradient
end
@weights
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_weights\n @weights_set.each_weights_with_index do |weights, i|\n weights.each_with_index do |wl, j|\n wl.each_with_index do |w, k|\n wl[k] = w - (@learning_rate * @layers[i].nodes[j].this_output * @layers[i+1].nodes[k].this_backward_output)\n end\n end\n ... | [
"0.70200753",
"0.66459846",
"0.61175424",
"0.58790505",
"0.58276886",
"0.58088493",
"0.5682545",
"0.5570744",
"0.55451596",
"0.552092",
"0.5494294",
"0.54550266",
"0.5401866",
"0.5313463",
"0.5284602",
"0.5275065",
"0.51711917",
"0.51295435",
"0.51034564",
"0.5080689",
"0.507... | 0.71171224 | 0 |
Sends events to InfluxDB instance | def process(event)
event_data = build_event_data(event)
transform_and_write(event, event_data)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def send_events; end",
"def run(event)\n begin\n # Process each line as an individual data point.\n # We expect lines to look like the following:\n #\n # {\"metric\": \"series.name\", \"value\": <value>, \"time\": <time>, \"meta1\": <v1>, \"meta2\", <v2>}\n #\n # Th... | [
"0.61887956",
"0.5858483",
"0.57966053",
"0.5766388",
"0.56509364",
"0.56074506",
"0.5539165",
"0.55095005",
"0.5501578",
"0.5493861",
"0.5466131",
"0.54602164",
"0.5456018",
"0.5433575",
"0.54174715",
"0.53938365",
"0.5368205",
"0.5349423",
"0.53370357",
"0.53032726",
"0.528... | 0.0 | -1 |
Builds an EventData struct for the event | def build_data(event, type, value)
data = EventData.new
data.name = event.name
data.tags = @tags.merge(event.tags.merge(type: type))
data.fields = value.is_a?(Hash) ? value : { value: value }
data.timestamp = calc_timestamp(event)
data
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_data(event, data={})\n self['event'] = event\n self['properties'] = data \n self['properties']['token'] = @key\n self['properties']['time'] = Time.now.to_i\n \n Base64.encode64(JSON.generate(self))\n end",
"def make_cloud_event data,\n id: nil,\n ... | [
"0.683253",
"0.66712433",
"0.6597189",
"0.6569381",
"0.6459051",
"0.64570755",
"0.6434016",
"0.6394337",
"0.63798267",
"0.6371044",
"0.62459713",
"0.6233024",
"0.6207171",
"0.61597395",
"0.6134273",
"0.6115469",
"0.6112472",
"0.6112472",
"0.6043187",
"0.6004768",
"0.59538007"... | 0.84404904 | 0 |
creates a new gladiator | def set_gladiator(name)
gladiator_array=[]
gladiator=name.get_gladiator
gladiator.to_s
gladiator_array.push(gladiator)
return gladiator_array
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_gladiator(g)\n if gladiators.count < 2\n gladiators << g\n end\n end",
"def new\n @gram = Gram.new\n end",
"def new\n @lounge = Lounge.new\n end",
"def new; end",
"def new; end",
"def new; end",
"def new; end",
"def new; end",
"def new; end",
"def new; end",
"def new... | [
"0.64790356",
"0.64231545",
"0.613472",
"0.605719",
"0.605719",
"0.605719",
"0.605719",
"0.605719",
"0.605719",
"0.605719",
"0.605719",
"0.605719",
"0.6029247",
"0.5995323",
"0.5995323",
"0.5995323",
"0.5995323",
"0.5995323",
"0.5963302",
"0.581475",
"0.5810025",
"0.5810025... | 0.0 | -1 |
tag_regex was quoted from | def tag_regex
/<[^"'<>]*(?:"[^"]*"[^"'<>]*|'[^']*'[^"'<>]*)*(?:>|(?=<)|$)/
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_tag_regex(tag)\n return Regexp.new(\"\\\\A\\\\{#{tag} /\\\\}\")\n end",
"def get_open_tag_regex(tag)\n return Regexp.new(\"\\\\A\\\\{#{tag}\\\\}\")\n end",
"def regex\n 'tag regex not defined'\n end",
"def get_close_tag_regex(tag)\n return Regexp.new(\"\\\\A\\\\{/#{tag}\\\\}\")\n ... | [
"0.7461963",
"0.7303174",
"0.7204881",
"0.7025602",
"0.6678563",
"0.6352099",
"0.6322554",
"0.62146616",
"0.61767477",
"0.6152538",
"0.6148737",
"0.61422014",
"0.6126073",
"0.6093029",
"0.6059583",
"0.6054626",
"0.6001164",
"0.6000584",
"0.5981174",
"0.59735584",
"0.5925583",... | 0.7697845 | 0 |
SetCookie: exp_last_visit=959242411; expires=Mon, 23May2011 03:13:31 GMT; path=/ SetCookie: exp_last_activity=1274602411; expires=Mon, 23May2011 03:13:31 GMT; path=/ SetCookie: exp_tracker=a%3A1%3A%7Bi%3A0%3Bs%3A5%3A%22index%22%3B%7D; path=/ | def passive(target)
m = []
m << { name: "exp_last_visit cookie" } if target.headers["set-cookie"] =~ /exp_last_visit=/
m
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cookie_value; end",
"def cookie\n { :value => Crypt.encrypt(cookie_value), :expires => 1.year.from_now }\n end",
"def collect_cookies_from_response; end",
"def get_cookie(data)\n if data['set-cookie']\n t = []\n data['Set-Cookie'].split(\", \").map{|c|\n tmp = c.scan(/([a-zA-Z0-9_... | [
"0.69871426",
"0.6624166",
"0.657361",
"0.6557597",
"0.6555562",
"0.64811134",
"0.6456388",
"0.6456388",
"0.6456388",
"0.6456388",
"0.6456388",
"0.6456388",
"0.6456388",
"0.639051",
"0.63844275",
"0.6376919",
"0.6364097",
"0.6356716",
"0.63495785",
"0.6340304",
"0.63357997",
... | 0.61160463 | 39 |
these plugins only identify the system. they don't find out the version, etc | def aggressive(target)
m = []
url = URI.join(target.uri.to_s, "READ_THIS_FIRST.txt").to_s
new_target = WhatWeb::Target.new(url)
if /ExpressionEngine/.match?(new_target.body)
m << { name: "readthisfirst txt file" }
end
url = URI.join(target.uri.to_s, "system/updates/ee_logo.jpg").to_s
new_target = WhatWeb::Target.new(url)
if (new_target.status == 200) && new_target.body =~ /JFIF/
m << { name: "ee_logo jpg" }
end
m
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def system_information\n super\n end",
"def system_information\n super\n end",
"def host_os; end",
"def host_os; end",
"def plugin_info\n {\n 'name' => plugin_name,\n 'description' => 'The Core plugin provides a lot of the basic functionality required by WebD... | [
"0.6584066",
"0.6584066",
"0.64166236",
"0.64166236",
"0.6325491",
"0.6312212",
"0.6312212",
"0.6312212",
"0.6312212",
"0.62233603",
"0.615205",
"0.61220914",
"0.61198616",
"0.6104687",
"0.6097755",
"0.6097755",
"0.6097755",
"0.60565907",
"0.6043349",
"0.60408026",
"0.6036763... | 0.0 | -1 |
GET /cuts/1 GET /cuts/1.xml | def show
@cut = Cut.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @cut }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @cutoffs = Cutoff.all\n end",
"def destroy\n @cut = Cut.find(params[:id])\n @cut.destroy\n\n respond_to do |format|\n format.html { redirect_to(cuts_url) }\n format.xml { head :ok }\n end\n end",
"def show\n @cutout = Cutout.find(params[:id])\n\n respond_to do |form... | [
"0.6026713",
"0.556015",
"0.55064845",
"0.5161346",
"0.5094287",
"0.5093199",
"0.507121",
"0.50341946",
"0.5016716",
"0.49953574",
"0.49876052",
"0.49757472",
"0.49327156",
"0.4927696",
"0.4904725",
"0.48944628",
"0.4865665",
"0.4860705",
"0.48521563",
"0.48521563",
"0.484707... | 0.68019354 | 0 |
GET /cuts/new GET /cuts/new.xml | def new
@cut = Cut.new
render :layout => "fancybox"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n @cut = Cut.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @cut }\n end\n end",
"def new\n @cutout = Cutout.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @cutout }... | [
"0.6451809",
"0.6331289",
"0.6289545",
"0.6241494",
"0.5978675",
"0.5892836",
"0.58688766",
"0.58075696",
"0.5772072",
"0.57614475",
"0.5710988",
"0.5646958",
"0.5638855",
"0.56236887",
"0.5607283",
"0.5607088",
"0.55898833",
"0.55588573",
"0.55535114",
"0.55435586",
"0.55257... | 0.0 | -1 |
POST /cuts POST /cuts.xml | def create
@cut = Cut.new(params[:cut])
respond_to do |format|
#format.html # new.html.erb
#format.xml { render :xml => @cut }
if @cut.save
format.html {render :template => 'shared/close', :layout => "fancybox"}
format.js
else
render :action => "new"
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @cut = Cut.new(params[:cut])\n\n respond_to do |format|\n if @cut.save\n format.html { redirect_to @cut, notice: 'Cut was successfully created.' }\n format.json { render json: @cut, status: :created, location: @cut }\n else\n format.html { render action: \"new\" }\... | [
"0.5507414",
"0.50106007",
"0.49711767",
"0.4925997",
"0.48120427",
"0.4753982",
"0.47369233",
"0.47134778",
"0.46512583",
"0.46352363",
"0.4612229",
"0.45969674",
"0.45692724",
"0.4507522",
"0.45024654",
"0.4484568",
"0.4462535",
"0.44606534",
"0.4423265",
"0.44033647",
"0.4... | 0.4571058 | 12 |
PUT /cuts/1 PUT /cuts/1.xml | def update
@cut = Cut.find(params[:id])
respond_to do |format|
if @cut.update_attributes(params[:cut])
format.html {render :template => 'shared/close', :layout => "fancybox"}
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @cut.errors, :status => :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n @cut = Cut.find(params[:id])\n\n respond_to do |format|\n if @cut.update_attributes(params[:cut])\n format.html { redirect_to @cut, notice: 'Cut was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n ... | [
"0.62571895",
"0.5593645",
"0.55807143",
"0.5572266",
"0.5501883",
"0.53673387",
"0.531672",
"0.5299393",
"0.5261561",
"0.5250509",
"0.52018464",
"0.51516575",
"0.50983614",
"0.50685215",
"0.5066233",
"0.505698",
"0.50537854",
"0.5052829",
"0.50149864",
"0.50004613",
"0.49976... | 0.58855915 | 1 |
DELETE /cuts/1 DELETE /cuts/1.xml | def destroy
@cut = Cut.find(params[:id])
@cut.destroy
respond_to do |format|
format.html { redirect_to(cuts_url) }
format.xml { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def netdev_resxml_delete( xml )\n top = netdev_resxml_top( xml )\n par = top.instance_variable_get(:@parent)\n par['delete'] = 'delete'\n end",
"def destroy\n @cut = Cut.find(params[:id])\n @cut.destroy\n\n respond_to do |format|\n format.html { redirect_to cuts_url }\n format.json {... | [
"0.6323922",
"0.61534464",
"0.59840083",
"0.59013546",
"0.5826975",
"0.5794585",
"0.5763212",
"0.56667894",
"0.566043",
"0.56365645",
"0.5619103",
"0.561894",
"0.56108445",
"0.5600029",
"0.55876774",
"0.55876774",
"0.5578834",
"0.5530302",
"0.55084825",
"0.5494566",
"0.548681... | 0.68970066 | 0 |
handle finding listing here separately | def listings
@alert = Alert.new
#Paginated queries
#@offices = Office.where("loc_zip = ?", params[:city]).page(params[:page])
@search_string = params[:city]
@sort = params[:sort]
@direction = params[:dir]
@rentto = params[:rentto]
@rentfrom = params[:rentfrom]
if (@rentto == nil)
@rentto = '10000'
end
if(@rentfrom == nil)
@rentfrom = '0'
end
activefilter = 'active_flag != 0'
rentfilter = 'rent >= ? AND rent <= ?'
#@offices = Office.near(@search_string, 20, :order => 'rent IS NULL, rent > 0, rent ASC').where(activefilter).where(rentfilter, @rentfrom, @rentto).page(params[:page])
if (@search_string != "")
@offices = Office.near(@search_string, 20).where(rentfilter, @rentfrom, @rentto).where(activefilter).page(params[:page])
else
@offices = Office.order('created_at DESC').where(rentfilter, @rentfrom, @rentto).where(activefilter).page(params[:page])
end
session[:query] = @offices.map(&:id)
session[:search_results] = request.url
#@json = @offices.to_gmaps4rails do |office, marker|
# marker.infowindow render_to_string(:partial => "/offices/infowindow", :locals => { :office => office})
# marker.title "#{office.title}"
# marker.picture({ :picture => "/assets/office-building.png", :width =>32, :height => 32})
#end
track_event("Viewed Search Results")
respond_to do |format|
format.html # listings.html.erb
format.json { render json: @offices }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def search_results(all_pages)\n formatted_list = []\n all_pages.each do |show_hash|\n formatted_list << \"id. #{show_hash[\"id\"]} - #{show_hash[\"name\"]}\"\n end\n if formatted_list.count != 1\n self.print_search_results(formatted_list)\n else\n fetch_show_by_id(all_pages[0][\"id\"].to_s)\n end\ne... | [
"0.6504202",
"0.62336254",
"0.62258184",
"0.61401534",
"0.6111487",
"0.60222465",
"0.6021486",
"0.5999449",
"0.5929894",
"0.589113",
"0.58715796",
"0.5851858",
"0.5814259",
"0.5805632",
"0.5797818",
"0.57919127",
"0.5780912",
"0.5722824",
"0.5699171",
"0.5698427",
"0.5686183"... | 0.0 | -1 |
Creates a new queue using the +:name+ option in +options+ | def initialize options
super
@group = 'Queue'
@name = options[:name]
@type = 'Queue'
@queue = Queue.new
@last_description = Time.at 0
@last_size = 0
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create name, options = {}\n\n # SQS removed the default prefix to the visibility timeout option\n # in the 2011-10-01 update -- this allows us to not break existing\n # customers.\n if options[:default_visibility_timeout]\n options[:visibility_timeout] = \n optio... | [
"0.842686",
"0.8209872",
"0.8209872",
"0.76802063",
"0.73292136",
"0.70913476",
"0.7061851",
"0.70139307",
"0.6908772",
"0.68558156",
"0.6745008",
"0.6724678",
"0.6669589",
"0.6665065",
"0.6596163",
"0.6556369",
"0.6545494",
"0.65198976",
"0.6504233",
"0.6492756",
"0.64527375... | 0.7403372 | 4 |
Validates the size of an uploaded picture. | def picture_size
if picture.size > 5.megabytes
errors.add(:picture, "should be less than 5MB")
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def picture_size\n if picture.size > 5.megabytes\n errors.add(:picture, \"should be less than 5MB\")\n end\n end",
"def picture_size\n if picture.size > 5.megabytes\n errors.add(:picture, \"should be less than 5MB\")\n end\n end",
"def picture_size\n errors.add(:pictu... | [
"0.8246438",
"0.8246069",
"0.8203719",
"0.8203224",
"0.8199266",
"0.8188133",
"0.8187386",
"0.8179088",
"0.81592035",
"0.81588674",
"0.8158672",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81... | 0.8011205 | 60 |
Validates the size of an uploaded picture. | def tag_count
if tag_list.size > 8
errors.add(:tag_list, "should be at most 8")
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def picture_size\n if picture.size > 5.megabytes\n errors.add(:picture, \"should be less than 5MB\")\n end\n end",
"def picture_size\n if picture.size > 5.megabytes\n errors.add(:picture, \"should be less than 5MB\")\n end\n end",
"def picture_size\n errors.add(:pictu... | [
"0.8246438",
"0.8246069",
"0.8203719",
"0.8203224",
"0.8199266",
"0.8188133",
"0.8187386",
"0.8179088",
"0.81592035",
"0.81588674",
"0.8158672",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81494313",
"0.81... | 0.0 | -1 |
both tagging and search functionality for tags is working if you comment out load_album we still have to fix the routes here (error message: no album id) and make the views awesome | def search
if params[:tag]
@pictures = Picture.tagged_with(params[:tag])
elsif params[:search]
@pictures = Picture.tagged_with(params[:search])
else
redirect_to albums_path and return
end
render "index"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n params[:tag] ? @photos = Photo.tagged_with(params[:tag]) : @photos = Photo.all\n album = params[:album_id]\n @photos = current_user.photos.all.where(album_id: album).with_attached_pictures\n\n end",
"def show\n @tag = Tag.find(params[:id])\n if current_admin\n if params[:pin]\n ... | [
"0.68110055",
"0.6789051",
"0.66372424",
"0.65374595",
"0.653477",
"0.65324044",
"0.6513146",
"0.6490021",
"0.64778364",
"0.6429734",
"0.63510275",
"0.62893426",
"0.62779796",
"0.6252853",
"0.622662",
"0.62258863",
"0.62258863",
"0.62106645",
"0.6207834",
"0.61413425",
"0.613... | 0.7268 | 0 |
we use get content type rather than downloading the resource as image and html can be fairly large and the are no need to download it if it's an image, so we use HEAD request instead to what is the content type | def get_content_type(uri)
# TODO double check that open, doesn't make a get request, otherwise use a lower level lib like Net::HTTP
req = open(uri)
req.content_type
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def http_head\n return @head if defined?(@head)\n begin\n @head = conn.request(http_request(:head, resource[:source]))\n fail \"#{resource[:http_verb].to_s.upcase} #{resource[:source]} \" +\n \"returned #{@head.code}\" unless @head.code == '200'\n rescue Exception => e\n fail \"Fa... | [
"0.7709937",
"0.6888459",
"0.68741447",
"0.6827807",
"0.6734341",
"0.6668738",
"0.66157055",
"0.64784664",
"0.64784664",
"0.6434971",
"0.640684",
"0.6392423",
"0.6391949",
"0.636833",
"0.6366609",
"0.6365202",
"0.63535523",
"0.6352472",
"0.63133645",
"0.6278015",
"0.62655914"... | 0.55501664 | 96 |
GET /phrases GET /phrases.json | def index
@phrase = Phrase.where(state: nil).order("RANDOM()").first
cookies[:phrase_id] = @phrase.id
respond_to do |format|
format.html
format.js
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def phrases\n response[\"phrases\"]\n end",
"def get_phrases(word, *args)\n http_method = :get\n path = '/word/{word}/phrases'\n path.sub!('{word}', word.to_s)\n\n # Ruby turns all key-value arguments at the end into a single hash\n # e.g. Wordnik.word.get_examples('dingo', :limit => 1... | [
"0.766637",
"0.7291935",
"0.6659579",
"0.6614892",
"0.6614892",
"0.65226316",
"0.6438886",
"0.63866085",
"0.6353058",
"0.6264913",
"0.61931",
"0.6171728",
"0.6171728",
"0.6156062",
"0.6146977",
"0.61302704",
"0.5896962",
"0.5822613",
"0.57709056",
"0.5565925",
"0.5564534",
... | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def update_phrases
if cookies[:phrase_id].present?
@sequence = Phrase.order("sequence ASC").last.sequence
if @sequence == Phrase.count
Phrase.reset_all
else
@last_phrase = Phrase.where(id: cookies[:phrase_id]).first
@last_phrase.update(state: 'displayed', sequence: @sequence+1)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n... | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",... | 0.0 | -1 |
Is the ground tile empty? | def is_empty?()
  # A tile is empty when no living being occupies it.
  @livingbeing.nil?
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def empty?\n is_empty = false\n if self.bounds\n is_empty = ((self.bounds.top + self.bounds.left + self.bounds.bottom + self.bounds.right) == 0)\n end\n\n is_empty\n end",
"def empty?\n (w * h).zero?\n end",
"def surrounding_tile_empty?(board, direction, tile_position)\n begin\... | [
"0.74410087",
"0.74383813",
"0.7423922",
"0.741895",
"0.74007225",
"0.72829574",
"0.71477705",
"0.7119536",
"0.7119525",
"0.7108425",
"0.7085447",
"0.70107347",
"0.7001963",
"0.7000623",
"0.6994056",
"0.6988715",
"0.69496787",
"0.69322276",
"0.69247824",
"0.69168323",
"0.6909... | 0.0 | -1 |
This method determines if the tile has a plant | def is_a_plant?()
  # An empty tile cannot hold a plant; otherwise ask the occupant.
  return false if is_empty?
  @livingbeing.is_a?(Plant)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nice_place_for_tower? ghost\n @towers.collide_sprite(ghost).empty? and @the_path.collide_sprite(ghost).empty?\n end",
"def has_creature?\r\n not @creatures.empty?\r\n end",
"def tall_grass?\n return $game_player.system_tag == TGrass\n end",
"def is_valid_tile?(name)\n\t\terror = \"Not... | [
"0.68494105",
"0.6468489",
"0.6453438",
"0.6348676",
"0.6315983",
"0.630457",
"0.62312984",
"0.6152149",
"0.61064565",
"0.60894984",
"0.60883355",
"0.6071054",
"0.6063045",
"0.6054763",
"0.5998047",
"0.59435403",
"0.5904917",
"0.5901336",
"0.5894541",
"0.5894087",
"0.574087",... | 0.7292436 | 0 |
This method determines if the tile has an animal | def is_an_animal?()
  # An empty tile cannot hold an animal; otherwise ask the occupant.
  return false if is_empty?
  @livingbeing.is_a?(Animal)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nice_place_for_tower? ghost\n @towers.collide_sprite(ghost).empty? and @the_path.collide_sprite(ghost).empty?\n end",
"def wall?(tile)\n tile.flatten.count {|t| t == WALL} > 4\n end",
"def is_empty_or_HQ tile\n return tile.obstacle_type == Obstacle_type::Empty || tile.obstacle_type == Obstac... | [
"0.6639073",
"0.6451619",
"0.6317571",
"0.63141227",
"0.6272857",
"0.6264759",
"0.62261534",
"0.61925375",
"0.60957575",
"0.6067087",
"0.6049414",
"0.60393476",
"0.5958589",
"0.59578353",
"0.59447944",
"0.5918232",
"0.5845404",
"0.58420676",
"0.5825059",
"0.58167815",
"0.5807... | 0.69774234 | 0 |
This method determines if the tile has a vegetarian animal | def is_a_vegetarian?()
  # An empty tile cannot hold a vegetarian; otherwise ask the occupant.
  return false if is_empty?
  @livingbeing.is_a?(Vegetarian)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_an_animal?()\n\n !is_empty? && @livingbeing.is_a?(Animal)\n end",
"def vegetarian?\n \t\tif @toppings.any? {|topping| topping.vegetarian != false }\n \t\t\treturn true\n \t\telse\n \t\t\treturn false\n \t\tend\n \tend",
"def vegetarian?\n @toppings.all? {|each_topping| each_topping.ve... | [
"0.6688679",
"0.6641568",
"0.65086174",
"0.64647734",
"0.6422737",
"0.6283036",
"0.6266044",
"0.62386024",
"0.6182373",
"0.61469877",
"0.6046685",
"0.59861356",
"0.5950294",
"0.5869207",
"0.58576643",
"0.5845165",
"0.58387464",
"0.5808496",
"0.57851875",
"0.5766789",
"0.57640... | 0.66938066 | 0 |
This method determines if the tile has a predator animal | def is_a_predator?()
  # An empty tile cannot hold a predator; otherwise ask the occupant.
  return false if is_empty?
  @livingbeing.is_a?(Predator)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hunter_trapped?\n\t\tcorners = []\n\t\t[-1, +1].each{|dx| [-1, +1].each{|dy| corners << {:x => @hunter.x + dx, :y => @hunter.y + dy} } }\n\t\t!(corners.map{|p| occupied?(p[:x], p[:y])}.include? false )\n\tend",
"def nice_place_for_tower? ghost\n @towers.collide_sprite(ghost).empty? and @the_path.collide... | [
"0.6328686",
"0.6325761",
"0.62153625",
"0.6053454",
"0.60182047",
"0.6017554",
"0.59561473",
"0.5912346",
"0.59093577",
"0.5877264",
"0.58567315",
"0.5852423",
"0.58476824",
"0.5830173",
"0.5800969",
"0.5795487",
"0.575086",
"0.5749187",
"0.5734128",
"0.572914",
"0.5710581",... | 0.6483757 | 0 |
logo size width and height need gt 100px, and image should be square | def validate_log_image_size
return unless logo.image_size.present?
w, h = logo.image_size
if (h < 100 || w < 100) || (h.to_f / w.to_f != 1)
errors.add :base, 'Logo size width and height need gt 100px, and image should be square.'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def resize_image\n unless logo.nil?\n if logo.height != 100\n self.logo = logo.thumb('x100') # resize height and maintain aspect ratio\n end\n end\n end",
"def resize_logo\n logo = params[:airline][:logo]\n return if logo.nil?\n\n begin\n logo = MiniMagi... | [
"0.7331914",
"0.65917194",
"0.6505472",
"0.63392544",
"0.6267421",
"0.6179723",
"0.6127861",
"0.61276996",
"0.6120776",
"0.6090901",
"0.6090901",
"0.6012104",
"0.6002215",
"0.59828067",
"0.59826696",
"0.59786445",
"0.5935857",
"0.5935857",
"0.5903009",
"0.59029835",
"0.587061... | 0.7248209 | 1 |
Returns `true` if this URI is a URN. | def urn?
  # Use the parsed components when available; otherwise fall back to a
  # cheap prefix test on the raw string form.
  if @object
    @object[:scheme] == 'urn'
  else
    start_with?('urn:')
  end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def uri?\n !!@uri\n end",
"def uri?\n !!@uri\n end",
"def valid?\n RDF::URI::IRI.match?(to_s) || false\n end",
"def url?\n !urn?\n end",
"def is_uri?(uri)\n URI::Generic===uri\n end",
"def uri?(word)\n return false if config.include_uris\n !(word =~ URI.r... | [
"0.6882891",
"0.6882891",
"0.68492466",
"0.6816075",
"0.67333394",
"0.6691157",
"0.66678935",
"0.6644357",
"0.6536065",
"0.64340204",
"0.6413093",
"0.63922304",
"0.63886434",
"0.63557637",
"0.6315152",
"0.6289234",
"0.6245365",
"0.62395966",
"0.62268573",
"0.62099385",
"0.618... | 0.76560193 | 0 |
Returns `true` if the URI scheme is hierarchical. | def hier?
# Hierarchical unless the scheme appears in the NON_HIER_SCHEMES list
# (defined elsewhere in this class).
!NON_HIER_SCHEMES.include?(scheme)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hierarchical?\n if @path\n true\n else\n false\n end\n end",
"def hierarchical?\n false\n end",
"def hierarchical?\n @hierarchical ||= !klass.fact_model.hierarchical_levels.empty?\n end",
"def hierarchical?()\n return self.id =~ /\\Ahl_/i\n end"... | [
"0.80149424",
"0.74122113",
"0.7324552",
"0.68569297",
"0.67666864",
"0.6473802",
"0.63790053",
"0.62613755",
"0.6171701",
"0.61692697",
"0.6091672",
"0.6055746",
"0.60151243",
"0.5936152",
"0.5927632",
"0.59077466",
"0.5900963",
"0.58724177",
"0.58352447",
"0.5830553",
"0.57... | 0.67259824 | 5 |
Returns `true` if this URI is a URL. | def url?
# A URI is considered a URL exactly when it is not a URN.
!urn?
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_url?\n path =~ URL_PATHS\n end",
"def is_url?\n self =~ /^#{URI::regexp}$/\n end",
"def is_url?(path)\n path.to_s =~ %r{\\Ahttps?://}\n end",
"def url?(uri)\n /\\w+\\:\\/\\// =~ uri\n end",
"def url?(uri)\n /\\w+\\:\\/\\// =~ uri\n end",
"def is_a_ur... | [
"0.8149932",
"0.81114274",
"0.8061138",
"0.804516",
"0.8006839",
"0.7865508",
"0.78260535",
"0.7794512",
"0.774795",
"0.76719385",
"0.7513296",
"0.74437535",
"0.7442352",
"0.7441561",
"0.74133223",
"0.74000615",
"0.7340881",
"0.7339651",
"0.73148525",
"0.7309931",
"0.7304094"... | 0.7266894 | 30 |
A URI is absolute when it has a scheme | def absolute?; !scheme.nil?; end # true when a scheme component is present
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize_uri uri\n (uri =~ /^https?:/) ? uri : \"http://#{uri}\"\n end",
"def normalize_uri(uri)\n (uri =~ /^(https?|ftp|file):/) ? uri : \"http://#{uri}\"\n end",
"def normalize_uri(uri)\n (uri =~ /^(https?|ftp|file):/) ? uri : \"http://#{uri}\"\n end",
"def absolute?\n if @scheme\... | [
"0.7521802",
"0.74120474",
"0.73712873",
"0.7232414",
"0.71107334",
"0.70872813",
"0.70455635",
"0.70425075",
"0.7037392",
"0.7024583",
"0.7011962",
"0.7011962",
"0.6985663",
"0.6974694",
"0.6956826",
"0.6953116",
"0.6944954",
"0.6937592",
"0.6937247",
"0.69351107",
"0.692499... | 0.7837661 | 0 |
A URI is relative when it does not have a scheme | def relative?; !absolute?; end # relative URIs are exactly the non-absolute ones
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize_uri uri\n (uri =~ /^https?:/) ? uri : \"http://#{uri}\"\n end",
"def normalize_uri(uri)\n (uri =~ /^(https?|ftp|file):/) ? uri : \"http://#{uri}\"\n end",
"def normalize_uri(uri)\n (uri =~ /^(https?|ftp|file):/) ? uri : \"http://#{uri}\"\n end",
"def fix_relative_path(entry)\n ... | [
"0.7335719",
"0.72419953",
"0.7230024",
"0.71169907",
"0.7099781",
"0.7080596",
"0.706586",
"0.702215",
"0.69953",
"0.69541883",
"0.69534457",
"0.6935864",
"0.68578184",
"0.6857104",
"0.68541163",
"0.68436754",
"0.68339384",
"0.68264896",
"0.68264896",
"0.6826108",
"0.6815498... | 0.0 | -1 |
Attempt to make this URI relative to the provided `base_uri`. If successful, returns a relative URI, otherwise the original URI | def relativize(base_uri)
# Fast path: self extends base_uri by just a fragment/query, or base
# ends in '/' or '#' and self continues it — chop the shared prefix.
if self.to_s.start_with?(base_uri.to_s) && %w(# ?).include?(self.to_s[base_uri.to_s.length, 1]) ||
base_uri.to_s.end_with?("/", "#") &&
self.to_s.start_with?(base_uri.to_s)
return RDF::URI(self.to_s[base_uri.to_s.length..-1])
else
# Create a list of parents, for which this IRI may be relative.
u = RDF::URI(base_uri)
iri_set = u.to_s.end_with?('/') ? [u.to_s] : []
iri_set << u.to_s while (u = u.parent)
# Walk the ancestor list; each step up adds one "../" segment.
iri_set.each_with_index do |bb, index|
next unless self.to_s.start_with?(bb)
rel = "../" * index + self.to_s[bb.length..-1]
return rel.empty? ? "./" : rel
end
end
# No common ancestor found: return the original (absolute) URI.
self
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def relative_uri(uri)\n raise ArgumentError, \"relative_uri: uri may not be nil\" if uri.nil?\n uri = ensure_uri(uri)\n new_location = nil\n if uri.path[0...1] == '/'\n new_location = base.dup\n new_location.path = uri.path\n new_location.query = uri.query\n ... | [
"0.7798467",
"0.7185951",
"0.7173934",
"0.6980952",
"0.69449806",
"0.6737634",
"0.6698724",
"0.6639734",
"0.660973",
"0.6523994",
"0.6492987",
"0.64909244",
"0.6470132",
"0.64269817",
"0.64144635",
"0.6359792",
"0.6345061",
"0.627533",
"0.6250069",
"0.6212608",
"0.6190949",
... | 0.7484363 | 1 |
Returns the string length of this URI. | def length
  # Length of the lexical form (see #to_s); String#size is an alias.
  to_s.size
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def length\n @string.length\n end",
"def length\n string.length\n end",
"def length()\n return to_s.size\n end",
"def length\n to_s.length\n end",
"def length\n self.to_s.length\n end",
"def length()\n return to_s.gsub(\" \", \"\").gsub(\"-\", \"\").length\n end",
... | [
"0.78242993",
"0.7724559",
"0.7637541",
"0.73778033",
"0.7371393",
"0.7208186",
"0.7177713",
"0.71396446",
"0.7110266",
"0.70629054",
"0.7050632",
"0.7050632",
"0.7050632",
"0.69761026",
"0.69761026",
"0.6953073",
"0.69275385",
"0.68784344",
"0.68784344",
"0.68784344",
"0.684... | 0.77246404 | 1 |
Determine if the URI is a valid according to RFC3987 Note that RDF URIs syntactically can contain Unicode escapes, which are unencoded in the internal representation. To validate, %encode specifically excluded characters from IRIREF | def valid?
# Match the lexical form against the full IRI grammar (RDF::URI::IRI,
# defined elsewhere); `|| false` coerces any falsy result to exactly
# false rather than nil.
RDF::URI::IRI.match?(to_s) || false
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_uri_validator\n # invalid\n assert !RDF::valid_uri?(\"use`quote\")\n # assert !RDF::valid_uri?(\"use%7quote\")\n\n # valid\n assert RDF::valid_uri?(\"use%07quote\")\n\n end",
"def validate_uri(uri)\n return validate({:uri => uri})\n end",
"def validate_uri(uri)\n validate_... | [
"0.7567005",
"0.6716636",
"0.6687121",
"0.6640146",
"0.6630042",
"0.6554732",
"0.65251786",
"0.64884233",
"0.639766",
"0.63731575",
"0.6357588",
"0.63440907",
"0.6264105",
"0.6231117",
"0.6228975",
"0.61731523",
"0.6169825",
"0.6163464",
"0.61469966",
"0.6145928",
"0.6138145"... | 0.7392179 | 1 |
Returns a copy of this URI converted into its canonical lexical representation. | def canonicalize
  # Canonicalize a duplicate so the receiver itself is left untouched.
  dup.canonicalize!
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize\n uri = dup\n uri.normalize!\n uri\n end",
"def canonicalize\n # This is the opportunity to use exclusive canonicalization library\n self\n end",
"def canonical\n return self\n end",
"def normalize\n Wgit::Url.new(@uri.normalize.to_s)\n end",
"de... | [
"0.6818673",
"0.6529828",
"0.64043826",
"0.6326661",
"0.63262874",
"0.6082692",
"0.60694903",
"0.6058368",
"0.60122347",
"0.6008004",
"0.5945512",
"0.59415907",
"0.5842832",
"0.5828766",
"0.57785445",
"0.576998",
"0.5727437",
"0.56738216",
"0.5668283",
"0.56329685",
"0.561124... | 0.68084174 | 2 |
Converts this URI into its canonical lexical representation. | def canonicalize!
# Rebuild the parsed-component hash from the normalized accessors
# (duplicate slashes in the path collapsed), then drop the cached
# lexical value and hash code so they are lazily recomputed.
@object = {
scheme: normalized_scheme,
authority: normalized_authority,
path: normalized_path.squeeze('/'),
query: normalized_query,
fragment: normalized_fragment
}
@value = nil
@hash = nil
self
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize\n uri = dup\n uri.normalize!\n uri\n end",
"def canonicalize\n self.dup.canonicalize!\n end",
"def canonicalize\n self.dup.canonicalize!\n end",
"def canonicalize\n # This is the opportunity to use exclusive canonicalization library\n self\n end",
... | [
"0.6599982",
"0.65771496",
"0.65771496",
"0.6460843",
"0.63479686",
"0.63312876",
"0.62683",
"0.61958873",
"0.6117227",
"0.60698706",
"0.6047525",
"0.60321724",
"0.5987875",
"0.5980974",
"0.5972693",
"0.59446776",
"0.5913647",
"0.5912035",
"0.59016496",
"0.58861965",
"0.58760... | 0.581203 | 21 |
Joins several URIs together. This method conforms to join normalization semantics as per RFC3986, section 5.2. This method normalizes URIs, removes some duplicate path information, such as double slashes, and other behavior specified in the RFC. Other URI building methods are `/` and `+`. For an uptodate list of edge case behavior, see the shared examples for RDF::URI in the rdfspec project. | def join(*uris)
# RFC 3986 section 5.2 reference resolution, applied left to right.
# Start from our own components, minus userinfo/host/port (the
# combined :authority entry is kept instead).
joined_parts = object.dup.delete_if {|k, v| %i(user password host port).include?(k)}
uris.each do |uri|
uri = RDF::URI.new(uri) unless uri.is_a?(RDF::URI)
next if uri.to_s.empty? # Don't mess with base URI
case
when uri.scheme
# Absolute reference: it replaces every component.
joined_parts = uri.object.merge(path: self.class.normalize_path(uri.path))
when uri.authority
# Network-path reference: keep scheme, adopt authority/path/query.
joined_parts[:authority] = uri.authority
joined_parts[:path] = self.class.normalize_path(uri.path)
joined_parts[:query] = uri.query
when uri.path.to_s.empty?
# Empty path: keep the base path, adopt the query if present.
joined_parts[:query] = uri.query if uri.query
when uri.path[0,1] == '/'
# Absolute-path reference.
joined_parts[:path] = self.class.normalize_path(uri.path)
joined_parts[:query] = uri.query
else
# Merge path segments from section 5.2.3
# Note that if the path includes no segments, the entire path is removed
# > return a string consisting of the reference's path component appended to all but the last segment of the base URI's path (i.e., excluding any characters after the right-most "/" in the base URI path, or excluding the entire base URI path if it does not contain any "/" characters).
base_path = path.to_s.include?('/') ? path.to_s.sub(/\/[^\/]*$/, '/') : ''
joined_parts[:path] = self.class.normalize_path(base_path + uri.path)
joined_parts[:query] = uri.query
end
# The fragment always comes from the reference being joined.
joined_parts[:fragment] = uri.fragment
end
# Return joined URI
RDF::URI.new(**joined_parts)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def join(*uris)\n uris[0] = convert_to_uri(uris[0])\n uris.inject :merge\n end",
"def join(*uris)\n uris[0] = convert_to_uri(uris[0])\n uris.inject :merge\n end",
"def uri_join(*paths)\n return nil if paths.length == 0\n leadingslash = paths[0][0] == '/' ? '/' : ''\n tr... | [
"0.7892279",
"0.7892279",
"0.77287817",
"0.7523528",
"0.68324447",
"0.6707975",
"0.660298",
"0.6260196",
"0.6181789",
"0.6165321",
"0.6100802",
"0.6088622",
"0.6007124",
"0.5955257",
"0.59117395",
"0.57880795",
"0.5727168",
"0.5663237",
"0.56252676",
"0.5624152",
"0.55150056"... | 0.8249804 | 0 |
Simple concatenation operator. Returns a URI formed from concatenating the string form of two elements. For building URIs from fragments, you may want to use the smart separator, `/`. `join` implements another set of URI building semantics. | def +(other)
RDF::URI.intern(self.to_s + other.to_s)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def uri_join(*parts)\n parts = parts.compact.map(&URI.method(:escape))\n URI.parse(parts.join('/')).normalize.to_s\nend",
"def uri_join(*paths)\n return nil if paths.length == 0\n leadingslash = paths[0][0] == '/' ? '/' : ''\n trailingslash = paths[-1][-1] == '/' ? '/' : ''\n paths.map! { |... | [
"0.7929895",
"0.75910294",
"0.7143632",
"0.7142891",
"0.70629734",
"0.6856805",
"0.6841601",
"0.68205595",
"0.67942464",
"0.67752045",
"0.67752045",
"0.67522466",
"0.67513764",
"0.6744937",
"0.6744937",
"0.6728955",
"0.67252284",
"0.67066264",
"0.67052096",
"0.6702243",
"0.66... | 0.6048935 | 48 |
Returns `true` if this URI's scheme is not hierarchical, or its path component is equal to `/`. Protocols not using hierarchical components are always considered to be at the root. | def root?
  # Non-hierarchical schemes are always rooted; otherwise the path must
  # be "/" or empty.
  return true unless self.hier?
  self.path == '/' || self.path.to_s.empty?
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hierarchical?\n if @path\n true\n else\n false\n end\n end",
"def root?\n self.path == '/'\n end",
"def root?\n #%r{\\A/+\\z} =~ @path ? true : false\n @absolute and @path.empty?\n end",
"def root_path_match?\n url == '/'\n end",
"def... | [
"0.71204585",
"0.70288694",
"0.70263594",
"0.69384587",
"0.68374795",
"0.6809845",
"0.6679339",
"0.66516566",
"0.66320676",
"0.65941095",
"0.65214866",
"0.6452689",
"0.64093447",
"0.6278995",
"0.6188539",
"0.61794555",
"0.61390686",
"0.61227983",
"0.6120351",
"0.60914403",
"0... | 0.71649826 | 0 |
Returns a copy of this URI with the path component set to `/`. | def root
if root?
self
else
# Build a new URI keeping only scheme + authority, with the path
# forced to "/"; query and fragment are dropped.
RDF::URI.new(
**object.merge(path: '/').
keep_if {|k, v| %i(scheme authority path).include?(k)})
end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_path\n path = @uri.path\n return nil if path.nil? || path.empty?\n return Wgit::Url.new('/') if path == '/'\n\n Wgit::Url.new(path).omit_slashes\n end",
"def to_path\n path = @uri.path\n return nil if path.nil? || path.empty?\n return Wgit::Url.new('/') if path == '/'... | [
"0.70479107",
"0.7031522",
"0.6819957",
"0.66103965",
"0.65826094",
"0.653099",
"0.65304095",
"0.6519244",
"0.6495895",
"0.64543647",
"0.64355516",
"0.6435313",
"0.6397667",
"0.6348464",
"0.6348464",
"0.6299575",
"0.6299279",
"0.6277853",
"0.6267489",
"0.6261258",
"0.62557137... | 0.7023127 | 2 |
Returns `true` if this URI is hierarchical and it's path component isn't equal to `/`. | def parent?
# Anything that is not at the root has a parent.
!root?
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hierarchical?\n if @path\n true\n else\n false\n end\n end",
"def root?\n !self.hier? || self.path == '/' || self.path.to_s.empty?\n end",
"def empty_path?\n super || remaining_path == '/'\n end",
"def root?\n #%r{\\A/+\\z} =~ @path ? true : f... | [
"0.7859999",
"0.74288744",
"0.73693806",
"0.73129624",
"0.71888703",
"0.7181651",
"0.71026003",
"0.70278597",
"0.69944805",
"0.6985279",
"0.6931924",
"0.68636227",
"0.67810655",
"0.6728236",
"0.6718041",
"0.6717111",
"0.67157",
"0.66844594",
"0.6679112",
"0.66641575",
"0.6658... | 0.0 | -1 |
Returns a copy of this URI with the path component ascended to the parent directory, if any. | def parent
case
when root? then nil
else
require 'pathname' unless defined?(Pathname)
# Pathname#parent strips the last path segment; the `if path = ...`
# is an inline assignment, then the trailing slash is re-appended
# for non-root directories.
if path = Pathname.new(self.path).parent
uri = self.dup
uri.path = path.to_s
uri.path << '/' unless uri.root?
uri
end
end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def path\n return @path if instance_variable_defined? :@path\n @path = parent.path + path_components\n end",
"def parent\n return nil if @path == '/'\n unless @path.to_s.empty?\n new_for_path ::File.split(@path).first\n end\n end",
"def absolute_parent_path\n File.d... | [
"0.7217175",
"0.7217136",
"0.6841817",
"0.67703414",
"0.67431974",
"0.6716965",
"0.6686956",
"0.663685",
"0.65876836",
"0.65383923",
"0.64868766",
"0.6484734",
"0.6484734",
"0.6462214",
"0.6444698",
"0.64378375",
"0.63509727",
"0.63314587",
"0.6320492",
"0.61971176",
"0.61708... | 0.7763911 | 0 |
Returns a duplicate copy of `self`. | def dup
  # Rebuild from the cached lexical value plus any parsed components.
  options = @object || {}
  self.class.new(@value, **options)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def copy\n\t\t\treturn self.dup\n\t\tend",
"def dup\n self.class.new.tap { |obj| reverse_dup_copy obj }\n end",
"def clone\n self.copy\n end",
"def clone\n self.copy\n end",
"def dup\n copy(false)\n end",
"def dup\n self\n end",
"def dup\n self\n ... | [
"0.8614923",
"0.8291893",
"0.8265274",
"0.8265274",
"0.8187097",
"0.81220704",
"0.81220704",
"0.81220704",
"0.81160927",
"0.80938375",
"0.8084067",
"0.7994447",
"0.78893566",
"0.7837781",
"0.7806604",
"0.77636474",
"0.7760943",
"0.77590895",
"0.77124214",
"0.770652",
"0.77039... | 0.7496458 | 32 |
Returns `true` if this URI ends with the given `string`. | def end_with?(string)
  # The argument is coerced to a String and compared against the
  # lexical form.
  suffix = string.to_s
  to_s.end_with?(suffix)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ends_with?(str)\n return self.reverse.starts_with?(str.reverse)\n end",
"def endswith?(substring)\n self[-substring.size..-1] == substring\n end",
"def ends_with?(s)\n rindex(s) == size - s.size\n end",
"def ends_with?(s)\n rindex(s) == size - s.size\n end",
"def endsWith(search, endstr... | [
"0.78246677",
"0.7702655",
"0.7320164",
"0.7320164",
"0.728542",
"0.7245593",
"0.7245593",
"0.71299845",
"0.7120582",
"0.70980036",
"0.70502",
"0.6895989",
"0.6877284",
"0.6854528",
"0.6818893",
"0.68176025",
"0.6765727",
"0.65140146",
"0.6319272",
"0.6201982",
"0.6179051",
... | 0.82659924 | 0 |
Checks whether this URI the same term as `other`. | def eql?(other)
# Type check first, then the cheap cached-hash comparison, before the
# full equality test in #==.
other.is_a?(URI) && self.hash == other.hash && self == other
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def look_same_as?(other)\n return nil unless other.kind_of?(self.class)\n (name == other.name) and (definition == other.definition)\n end",
"def eql?(other)\n if self.class.equal?(other.class)\n if @uri\n return @uri == other.uri\n else\n @name = othe... | [
"0.68057287",
"0.6773118",
"0.6649714",
"0.6633253",
"0.6536983",
"0.6524697",
"0.6488762",
"0.6423201",
"0.6333704",
"0.6318932",
"0.6262483",
"0.6262483",
"0.62394863",
"0.62260413",
"0.6198465",
"0.615556",
"0.61547214",
"0.6154344",
"0.61444265",
"0.61308867",
"0.61224747... | 0.6730809 | 2 |
Checks whether this URI is equal to `other` (type checking). Per SPARQL datar2/exprequal/eq22, numeric can't be compared with other types | def ==(other)
case other
when Literal
# If other is a Literal, reverse test to consolidate complex type checking logic
other == self
when String then to_s == other
when URI then hash == other.hash && to_s == other.to_s
# Duck-typed fallback: anything responding to #to_uri is compared by
# its lexical form.
else other.respond_to?(:to_uri) && to_s == other.to_uri.to_s
end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ==(other)\n # If lexically invalid, use regular literal testing\n return super unless self.valid?\n\n case other\n when Literal::Numeric\n return super unless other.valid?\n (cmp = (self <=> other)) ? cmp.zero? : false\n when RDF::URI, RDF::Node\n # Interpreting SP... | [
"0.81873524",
"0.8160632",
"0.7332495",
"0.7156066",
"0.70878416",
"0.69334614",
"0.6909391",
"0.68815875",
"0.68359244",
"0.6732688",
"0.6710648",
"0.66436785",
"0.66308224",
"0.6623823",
"0.6619178",
"0.6618278",
"0.6618278",
"0.66032296",
"0.6599321",
"0.6551745",
"0.65458... | 0.71977645 | 3 |
Checks for case equality to the given `other` object. | def ===(other)
case other
when Regexp then other === to_s
else self == other
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def casecmp(other)\n return nil unless other.kind_of?(Symbol)\n lhs = self.to_s; lhs.upcase!\n rhs = other.to_s.upcase\n lhs <=> rhs\n end",
"def ==(other)\n @hashed == other.hash && @downcase_key == other.downcase_key\n end",
"def eql?(o)\n self.class == o.class &&\n ... | [
"0.69316214",
"0.657068",
"0.6437723",
"0.64171106",
"0.6405299",
"0.63610554",
"0.6240462",
"0.61613035",
"0.61117864",
"0.6109986",
"0.6109415",
"0.60949147",
"0.6085994",
"0.6085994",
"0.6085994",
"0.6085994",
"0.6085994",
"0.60839516",
"0.6077115",
"0.60763466",
"0.606903... | 0.6019579 | 24 |
Performs a pattern match using the given regular expression. | def =~(pattern)
case pattern
when Regexp then to_s =~ pattern
# Non-Regexp patterns defer to the default implementation.
else super # `Object#=~` returns `nil`
end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def match(pattern); end",
"def match(regexp); end",
"def match(input)\n regexp.match(input)\n end",
"def pattern2regex(pattern); end",
"def match(regex, &action)\n append_action(regex, action)\n self\n end",
"def regex(pattern)\n Regexp.new pattern.regex\n end",
"def match_... | [
"0.8037108",
"0.8013888",
"0.7835189",
"0.71711105",
"0.7136885",
"0.7006091",
"0.69647014",
"0.6874318",
"0.6870734",
"0.6776858",
"0.67500234",
"0.67498136",
"0.67094076",
"0.6672529",
"0.660472",
"0.6602487",
"0.66024494",
"0.65723014",
"0.6548848",
"0.65263003",
"0.652630... | 0.60246676 | 61 |
Returns the string representation of this URI. | def to_str; value; end # implicit String conversion delegates to the cached lexical value
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_s\n @uri.to_s\n end",
"def to_s\n @uri.to_s\n end",
"def to_s\n uri.to_s\n end",
"def to_s\n self.uri.to_s\n end",
"def to_s\n uri_string\n end",
"def to_s\n @uri\n end",
"def to_s\n reconstruct_uri\n end",
"def to_s\n ur... | [
"0.88596934",
"0.88596934",
"0.8802674",
"0.8739039",
"0.8730348",
"0.85708445",
"0.8563305",
"0.84582186",
"0.8366373",
"0.8166616",
"0.7738749",
"0.74966395",
"0.7493255",
"0.73826444",
"0.7380107",
"0.7380107",
"0.7345186",
"0.7325979",
"0.7305746",
"0.7279842",
"0.7237240... | 0.0 | -1 |
Returns a String representation of the URI object's state. | def inspect
  # Kernel#format is an alias of sprintf; class name, object id (hex)
  # and lexical form, with the format string unchanged.
  format("#<%s:%#0x URI:%s>", URI.to_s, object_id, to_s)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_s\n @uri.to_s\n end",
"def to_s\n @uri.to_s\n end",
"def to_s\n self.uri.to_s\n end",
"def to_s\n uri_string\n end",
"def to_s\n uri.to_s\n end",
"def to_s\n @uri\n end",
"def to_s\n @uri\n end",
"def to_s\n reconstruct_uri\n... | [
"0.76863825",
"0.76863825",
"0.75676817",
"0.7547951",
"0.7547421",
"0.753368",
"0.7499029",
"0.7342653",
"0.7315154",
"0.715362",
"0.71126086",
"0.70533454",
"0.7020359",
"0.69008005",
"0.69008005",
"0.68982095",
"0.68846625",
"0.67753845",
"0.67496663",
"0.67254514",
"0.665... | 0.7054479 | 11 |
lexical representation of URI, either absolute or relative | def value
# Lazily assemble and memoize the frozen lexical form from the parsed
# components: scheme only when absolute, authority prefixed by "//",
# query by "?" and fragment by "#".
return @value if @value
@value = [
("#{scheme}:" if absolute?),
("//#{authority}" if authority),
path,
("?#{query}" if query),
("##{fragment}" if fragment)
].compact.join("").freeze
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def process_uri(uri)\n ret = uri\n\n require 'open-uri'\n require 'open_uri_redirections'\n begin\n open(uri, :allow_redirections => :safe) do |r|\n ret = r.base_uri.to_s\n end\n end\n\n ret\n end",
"def to_s\n uri.to_s\n end",
"def normalise\n Wgit::Url.new... | [
"0.7155539",
"0.71046185",
"0.7075379",
"0.7074657",
"0.7047513",
"0.70445466",
"0.703514",
"0.703514",
"0.70280117",
"0.7010163",
"0.69922096",
"0.6987346",
"0.6970649",
"0.69701123",
"0.69569767",
"0.6949164",
"0.69460976",
"0.693174",
"0.6912456",
"0.6899113",
"0.689766",
... | 0.0 | -1 |
Returns a hash code for this URI. | def hash
@hash || @hash = (value.hash * -1)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def uri_hash # :nodoc:\n require_relative '../openssl'\n\n normalized =\n if @repository =~ %r{^\\w+://(\\w+@)?}\n uri = URI(@repository).normalize.to_s.sub %r{/$},''\n uri.sub(/\\A(\\w+)/) { $1.downcase }\n else\n @repository\n end\n\n OpenSSL::Digest::SHA1.hexdigest n... | [
"0.7003761",
"0.68944246",
"0.6792344",
"0.6728629",
"0.6728629",
"0.6654333",
"0.6635607",
"0.6635253",
"0.6613659",
"0.6607517",
"0.6583029",
"0.65798235",
"0.6576935",
"0.6543195",
"0.6537118",
"0.6537118",
"0.6537118",
"0.6537118",
"0.6537118",
"0.6537118",
"0.6537118",
... | 0.6037548 | 67 |
Return normalized version of scheme, if any | def normalized_scheme
scheme.strip.downcase if scheme
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize_scheme(scheme)\n return \"http\" unless scheme\n scheme.to_s[/^\\w+/]\n end",
"def normalize!\n if path&.empty?\n set_path('/')\n end\n if scheme && scheme != scheme.downcase\n set_scheme(self.scheme.downcase)\n end\n if host && host != host.downc... | [
"0.66337657",
"0.6199161",
"0.6087601",
"0.6033761",
"0.6033761",
"0.6033761",
"0.59784687",
"0.59784687",
"0.59784687",
"0.59784687",
"0.5930547",
"0.592364",
"0.59148175",
"0.58328164",
"0.57649523",
"0.5764647",
"0.5763254",
"0.5731132",
"0.5644569",
"0.56404644",
"0.56273... | 0.7910781 | 0 |
Normalized version of user | def normalized_user
URI.encode(CGI.unescape(user), ENCODE_USER).force_encoding(Encoding::UTF_8) if user
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalized_userinfo\n normalized_user + (password ? \":#{normalized_password}\" : \"\") if userinfo\n end",
"def user_hack\n user.as_json\n end",
"def canonical_user(opennebula_user)\n fail 'User object not provided!' unless opennebula_user\n {\n id: opennebula_user['ID'].to_i,\n ... | [
"0.70710737",
"0.65524244",
"0.6405865",
"0.6349357",
"0.63392705",
"0.6312053",
"0.62737787",
"0.6273305",
"0.62087476",
"0.6185879",
"0.6162272",
"0.6139768",
"0.6139768",
"0.60600585",
"0.60592043",
"0.60592043",
"0.6044825",
"0.60266703",
"0.59913605",
"0.5974402",
"0.595... | 0.75873435 | 0 |
Normalized version of password | def normalized_password
URI.encode(CGI.unescape(password), ENCODE_PASSWORD).force_encoding(Encoding::UTF_8) if password
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def password\n 'password'.gsub('', '')\n end",
"def standard_password\n \"!@Cd5678\"\n end",
"def to_s\n %[#<password safe>]\n end",
"def to_s\n %[#<password safe>]\n end",
"def password_clean\n crypted_password.decrypt(salt)\n end",
"def password_clean\n crypted_password.d... | [
"0.7514999",
"0.7390311",
"0.7065319",
"0.7065319",
"0.7017649",
"0.69719917",
"0.6937214",
"0.6802017",
"0.66510385",
"0.65986294",
"0.6570602",
"0.65625656",
"0.6560369",
"0.65564686",
"0.6548501",
"0.64932597",
"0.64585733",
"0.6421116",
"0.64018255",
"0.63607806",
"0.6347... | 0.8098254 | 0 |
Normalized version of host | def normalized_host
# Remove trailing '.' characters
host.sub(/\.*$/, '').downcase if host
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalized_host; end",
"def host_as_string; end",
"def hostname\n v = self.host\n v&.start_with?('[') && v.end_with?(']') ? v[1..-2] : v\n end",
"def full_host\n a = Addressable::URI.heuristic_parse(LygneoClient.scheme + \"://\" + self.host)\n a.port ||= a.inferred_port\n a\n ... | [
"0.85485524",
"0.72463536",
"0.6683324",
"0.66771674",
"0.6601006",
"0.65678555",
"0.65357023",
"0.64382637",
"0.6436221",
"0.6422333",
"0.6361616",
"0.63474184",
"0.6335246",
"0.6305359",
"0.6304074",
"0.6258679",
"0.6255576",
"0.6255576",
"0.6198166",
"0.6164743",
"0.613388... | 0.79118025 | 1 |
Normalized version of port | def normalized_port
if port
np = port.to_i
PORT_MAPPING[normalized_scheme] != np ? np : nil
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def port_string; end",
"def actual_port; end",
"def actual_port; end",
"def port_string\n (protocol == 'http://' && port == 80) || (protocol == 'https://' && port == 443) ? '' : \":#{port}\"\n end",
"def standard_port; end",
"def port=(_); end",
"def raw_host_with_port; end",
"def port(port, ... | [
"0.7742748",
"0.7472065",
"0.7472065",
"0.7131191",
"0.71001846",
"0.689193",
"0.6829901",
"0.68248224",
"0.6801955",
"0.67800945",
"0.67529",
"0.66881853",
"0.6676634",
"0.6645483",
"0.65834945",
"0.6576981",
"0.65671283",
"0.6550116",
"0.6537103",
"0.65159327",
"0.6505089",... | 0.80462945 | 0 |
Normalized version of path | def normalized_path
if normalized_scheme == "urn"
# Special-case URI. Normalize the NID component only
nid, p = path.to_s.split(':', 2)
return "#{nid.downcase}:#{p}"
end
segments = path.to_s.split('/', -1) # preserve null segments
norm_segs = case
when authority
# ipath-abempty
segments.map {|s| normalize_segment(s, ENCODE_ISEGMENT)}
when segments[0].nil?
# ipath-absolute
res = [nil]
res << normalize_segment(segments[1], ENCODE_ISEGMENT) if segments.length > 1
res += segments[2..-1].map {|s| normalize_segment(s, ENCODE_ISEGMENT)} if segments.length > 2
res
when segments[0].to_s.index(':')
# ipath-noscheme
res = []
res << normalize_segment(segments[0], ENCODE_ISEGMENT_NC)
res += segments[1..-1].map {|s| normalize_segment(s, ENCODE_ISEGMENT)} if segments.length > 1
res
when segments[0]
# ipath-rootless
# ipath-noscheme
res = []
res << normalize_segment(segments[0], ENCODE_ISEGMENT)
res += segments[1..-1].map {|s| normalize_segment(s, ENCODE_ISEGMENT)} if segments.length > 1
res
else
# Should be empty
segments
end
res = self.class.normalize_path(norm_segs.join("/"))
# Special rules for specific protocols having empty paths
(res.empty? && %w(http https ftp tftp).include?(normalized_scheme)) ? '/' : res
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize_path(path); end",
"def normalize_path(path); end",
"def normalize_path(path)\n path.sub(%r{^/}, '').tr('', '')\n end",
"def normalize_path(path)\n path = \"/#{path}\"\n path.squeeze!('/')\n path.sub!(%r{/+\\Z}, '')\n path = '/' if path == ''\n path\n end",
... | [
"0.8575562",
"0.8575562",
"0.79242694",
"0.78885746",
"0.78707784",
"0.7827658",
"0.78274065",
"0.7822997",
"0.7721619",
"0.7517025",
"0.73643434",
"0.7315313",
"0.7282565",
"0.72387934",
"0.72382367",
"0.7137711",
"0.71256125",
"0.7053488",
"0.7048544",
"0.70275533",
"0.6910... | 0.7779253 | 8 |
Normalized version of query | def normalized_query
normalize_segment(query, ENCODE_IQUERY) if query
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize_query(query, filters)\n filter = normalize_filters filters\n query ||= {match_all: {}}\n return query unless filter\n {filtered: {\n query: query,\n filter: filter,\n }}\n end",
"def query\n normalize_query(phrase_query, query_filters.compact)\n en... | [
"0.6765057",
"0.6573254",
"0.65565366",
"0.6501556",
"0.6496022",
"0.64419705",
"0.6435414",
"0.64274544",
"0.6392045",
"0.6353683",
"0.63371205",
"0.6289751",
"0.6289751",
"0.6269535",
"0.62375116",
"0.6207584",
"0.6196652",
"0.6196652",
"0.6192354",
"0.6108737",
"0.6095339"... | 0.76657283 | 0 |
Normalized version of fragment | def normalized_fragment
normalize_segment(fragment, ENCODE_IFRAGMENT) if fragment
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unnormalized; end",
"def normalized; end",
"def normalize; end",
"def to_uri_fragment\n # remove HTML tags from the input\n buf = gsub(/<.*?>/, '')\n\n # The first or only character must be a letter.\n buf.insert(0, 'a') unless buf[0,1] =~ /[[:alpha:]]/\n\n # The remaining charac... | [
"0.6597386",
"0.6518669",
"0.61620784",
"0.5987471",
"0.5955478",
"0.5912656",
"0.5911502",
"0.5832592",
"0.57712483",
"0.57440376",
"0.5621748",
"0.5592308",
"0.5551364",
"0.5551364",
"0.55459446",
"0.5518046",
"0.54817843",
"0.5459172",
"0.54372334",
"0.5432113",
"0.5406932... | 0.7743917 | 0 |
Authority is a combination of user, password, host and port | def authority
object.fetch(:authority) {
@object[:authority] = (format_authority if @object[:host])
}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def authority\n if port == default_port\n host\n else\n \"#{host}:#{port}\"\n end\n end",
"def normalized_authority\n if authority\n (userinfo ? normalized_userinfo.to_s + \"@\" : \"\") +\n normalized_host.to_s +\n (normalized_port ? \":\" + normalized_po... | [
"0.76548916",
"0.6994216",
"0.66359764",
"0.66359764",
"0.66345346",
"0.6577717",
"0.6577717",
"0.65482724",
"0.6419264",
"0.6229027",
"0.62023234",
"0.6188794",
"0.6157256",
"0.61285937",
"0.60644734",
"0.60495067",
"0.60348874",
"0.6030787",
"0.6030787",
"0.59932756",
"0.59... | 0.7132782 | 1 |
Return normalized version of authority, if any | def normalized_authority
if authority
(userinfo ? normalized_userinfo.to_s + "@" : "") +
normalized_host.to_s +
(normalized_port ? ":" + normalized_port.to_s : "")
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def authority\n object.fetch(:authority) {\n @object[:authority] = (format_authority if @object[:host])\n }\n end",
"def authority\n\t\t\t@options[:authority]\n\t\tend",
"def authority\n @authority ||= headers[AUTHORITY_KEY]\n end",
"def attribution\n user = roles.creat... | [
"0.7287796",
"0.64640313",
"0.6344367",
"0.58658844",
"0.58442056",
"0.5830571",
"0.581799",
"0.5788705",
"0.57717115",
"0.5745148",
"0.5663027",
"0.5646784",
"0.5646784",
"0.5646784",
"0.56200874",
"0.55238855",
"0.55058825",
"0.55049247",
"0.5501158",
"0.5487062",
"0.541480... | 0.78464764 | 0 |
Userinfo is a combination of user and password | def userinfo
object.fetch(:userinfo) {
@object[:userinfo] = (format_userinfo("") if @object[:user])
}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def userinfo\n if @user.nil?\n nil\n elsif @password.nil?\n @user\n else\n @user + ':' + @password\n end\n end",
"def normalized_userinfo\n normalized_user + (password ? \":#{normalized_password}\" : \"\") if userinfo\n end",
"def set_userinfo(user, password ... | [
"0.8243963",
"0.7913994",
"0.7587867",
"0.75262856",
"0.731",
"0.72552866",
"0.7188723",
"0.7188723",
"0.7147487",
"0.71421254",
"0.70869935",
"0.70768684",
"0.6975653",
"0.6959565",
"0.6901119",
"0.6753152",
"0.6731607",
"0.670153",
"0.66948897",
"0.66948897",
"0.66948897",
... | 0.7170302 | 8 |
Normalized version of userinfo | def normalized_userinfo
normalized_user + (password ? ":#{normalized_password}" : "") if userinfo
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def user_info\n @user_info ||= raw_info\n end",
"def userinfo\n object.fetch(:userinfo) {\n @object[:userinfo] = (format_userinfo(\"\") if @object[:user])\n }\n end",
"def user_info\n @user_info ||= raw_info.nil? ? {} : raw_info\n end",
"def user_info\n @use... | [
"0.7345859",
"0.7283157",
"0.7106172",
"0.7106172",
"0.6963861",
"0.69487244",
"0.69423306",
"0.6932815",
"0.68075556",
"0.6799789",
"0.6766816",
"0.6587923",
"0.6549099",
"0.64215845",
"0.6275371",
"0.6245187",
"0.62253916",
"0.6204835",
"0.62034494",
"0.61169165",
"0.611598... | 0.8177837 | 0 |
The HTTP request URI for this URI. This is the path and the query string. | def request_uri
return nil if absolute? && scheme !~ /^https?$/
res = path.to_s.empty? ? "/" : path
res += "?#{self.query}" if self.query
return res
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def request_uri\n return unless @path\n\n url = @query ? \"#@path?#@query\" : @path.dup\n url.start_with?(?/.freeze) ? url : ?/ + url\n end",
"def request_uri\n calculate_uri(@http_request.url)\n end",
"def path\n @uri.request_uri\n end",
"def uri\n # special parsin... | [
"0.8067162",
"0.8027202",
"0.7807166",
"0.7789044",
"0.77674913",
"0.76981246",
"0.7580137",
"0.73566234",
"0.73130244",
"0.7262976",
"0.7168167",
"0.7099632",
"0.7063205",
"0.7022153",
"0.7013583",
"0.70110154",
"0.69411397",
"0.6936572",
"0.69125307",
"0.6910213",
"0.688228... | 0.8519658 | 0 |
Dump of data needed to reconsitute this object using Marshal.load | def _dump(level)
value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def marshal_dump\n data\n end",
"def marshal_dump\n dump\n end",
"def marshal_dump; end",
"def _dump() end",
"def _dump(depth)\n scrooge_fetch_remaining\n scrooge_invalidate_updateable_result_set\n scrooge_dump_flag_this\n str = Marshal.dump(self)\n ... | [
"0.7710052",
"0.7330819",
"0.7198195",
"0.7160494",
"0.70642394",
"0.6983894",
"0.6914053",
"0.6892118",
"0.6873123",
"0.68053734",
"0.6790667",
"0.67697155",
"0.6747161",
"0.6734277",
"0.67225754",
"0.66745144",
"0.6670978",
"0.6655936",
"0.6627059",
"0.6613835",
"0.6605225"... | 0.0 | -1 |
Normalize a segment using a character range | def normalize_segment(value, expr, downcase = false)
if value
value = value.dup.force_encoding(Encoding::UTF_8)
decoded = CGI.unescape(value)
decoded.downcase! if downcase
URI.encode(decoded, expr).force_encoding(Encoding::UTF_8)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalize(string); end",
"def pre_normalize(text); end",
"def normalize(domain); end",
"def normalize(min, max, val)\n\t\t(val - min) / (max - min)\n\tend",
"def normalize(str) return str end",
"def normalize_range(range)\n ar = range.to_a\n if ar.min > 2 # normalizacja zadanego zakresu aby zaw... | [
"0.6527851",
"0.633165",
"0.61859167",
"0.602286",
"0.59891623",
"0.58817035",
"0.58598024",
"0.58482903",
"0.5842874",
"0.5806795",
"0.57999414",
"0.579788",
"0.57157785",
"0.5708137",
"0.56828976",
"0.5656884",
"0.56506157",
"0.56167525",
"0.56005234",
"0.5544738",
"0.55322... | 0.57185113 | 12 |
Creates a new Repo | def initialize(attr = {})
attr.reverse_merge!(client: Client)
@client = attr[:client]
@user = attr[:user]
@name = attr[:repo_name]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_repo(options)\n options = options.dup\n Repo.new(connection, post(\"/repos/create\", options)['repository'])\n end",
"def create(project_name, repo_name, description) ; end",
"def create(*args)\n params = args.extract_options!\n normalize! params\n filter! VALID_REPO_OPTI... | [
"0.80459476",
"0.7637333",
"0.75332284",
"0.7516626",
"0.7498341",
"0.7376518",
"0.7333526",
"0.7302506",
"0.70701206",
"0.70360637",
"0.7032881",
"0.7021163",
"0.700455",
"0.6979545",
"0.69636345",
"0.6951494",
"0.69467336",
"0.68855107",
"0.6870654",
"0.6857486",
"0.6794424... | 0.0 | -1 |
Generates api path for current repository | def path
"repos/#{@user}/#{name}"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_path\n path = %w(api where)\n path.concat api_method\n path = path.join('/')\n @path = \"/#{path}.json\"\n end",
"def api_path\n @api_path ||= find_api_path(base_uri)\n end",
"def base_path\n \"/api/v1\"\n end",
"def api_base_path\n \"/lol/platform/#{api_... | [
"0.74834174",
"0.7467335",
"0.7398668",
"0.72041404",
"0.7171341",
"0.7031587",
"0.70285195",
"0.70019346",
"0.6893392",
"0.68172485",
"0.6757019",
"0.67273915",
"0.6669937",
"0.6637179",
"0.6539643",
"0.65109295",
"0.65083784",
"0.64758027",
"0.6459216",
"0.64202243",
"0.636... | 0.59850067 | 44 |
Get the array of commits for current repository, loads remote data if it's isn't loaded yet | def commits
commits! if @commits.nil?
@commits
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def commits\n if !@commits\n @commits = []\n x = 0\n loop do\n x += 1\n response = get(\"/commits/list/#{repo.owner.login}/#{repo.name}/#{name}?page=#{x}\")\n break unless response.code == 200\n @commits += response['commits'].map { |c| Commit.new(conne... | [
"0.7559513",
"0.75347775",
"0.72896063",
"0.7270081",
"0.70407736",
"0.6925979",
"0.6911863",
"0.6902953",
"0.68893623",
"0.68328375",
"0.6807897",
"0.66945136",
"0.6675752",
"0.66626704",
"0.66564864",
"0.66254205",
"0.6491347",
"0.6464175",
"0.6459552",
"0.64576393",
"0.645... | 0.64150226 | 24 |
Load commits for current repository even if commits already loaded | def commits!
response = @client.get_json("#{path}/commits")
@commits = response.map do |commit|
Commit.new(client: @client, user: @user, repo: self, sha: commit['sha'])
end
@commits
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_commits(branch)\n commits = []\n skip = 0\n begin\n commits += @grit_repo.commits(branch, 500, skip)\n skip += 500\n end while commits.size == skip\n commits\n end",
"def load_commits(branch)\n commits = []\n page = 1\n begin... | [
"0.71749824",
"0.7099694",
"0.6955522",
"0.68989676",
"0.68090117",
"0.6773858",
"0.6516112",
"0.6372972",
"0.6366157",
"0.6344644",
"0.63419384",
"0.6337847",
"0.6289738",
"0.6258367",
"0.62359196",
"0.6118295",
"0.6099854",
"0.6059457",
"0.60153955",
"0.6007973",
"0.6007194... | 0.68532044 | 4 |
Check if repository data is loaded | def loaded?
not @hash.nil?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def loaded?\n @data\n end",
"def loaded?\n @data\n end",
"def loaded?\n !!@data\n end",
"def loaded?\n !! @data\n end",
"def loaded?\n !! @data\n end",
"def loaded?\n true\n end",
"def loaded?\n !@all.nil?\n end",
"def is... | [
"0.6931263",
"0.6931263",
"0.68537444",
"0.6814137",
"0.6814137",
"0.67958385",
"0.6720854",
"0.6664617",
"0.66557854",
"0.6595938",
"0.6520688",
"0.6520688",
"0.6520688",
"0.6520688",
"0.6520688",
"0.6520688",
"0.6499781",
"0.64875275",
"0.6434503",
"0.6434503",
"0.64127886"... | 0.68102956 | 6 |
Gets the repository data, loads remote data if it's isn't loaded yet | def hash
@hash ||= @client.get_hash(path)
@hash
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def repositories\n # TODO : merge with current data\n load_repos\n end",
"def data\n @repo_data\n end",
"def get_repositories\n get(\"#{url_base}/repositories?#{dc}\")[\"data\"]\n end",
"def fetch_data\n parse_data(self.organization.find_data(self.data_path, \n :include => [:... | [
"0.7193181",
"0.68035823",
"0.6684663",
"0.66165185",
"0.6581571",
"0.6476494",
"0.64745724",
"0.64291596",
"0.6421045",
"0.6365012",
"0.63299966",
"0.6260138",
"0.6221928",
"0.6171689",
"0.6170989",
"0.616003",
"0.6117354",
"0.6113632",
"0.6051064",
"0.60351706",
"0.60138524... | 0.0 | -1 |
Proxy missing methods to contained hash | def method_missing(method, *args)
hash.send(method, *args)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hash_method\n super\n end",
"def handle_inherited_hash(tf, method)\n if tf\n collect_inherited(method).inject(Doodle::OrderedHash.new){ |hash, item|\n hash.merge(Doodle::OrderedHash[*item])\n }.merge(@this.doodle.__send__(method))\n else\n @this.doodle.__send__... | [
"0.733741",
"0.6845855",
"0.6824094",
"0.68072355",
"0.6767971",
"0.6733433",
"0.652458",
"0.6474399",
"0.64657056",
"0.6436175",
"0.6398457",
"0.63396126",
"0.6307607",
"0.6307607",
"0.6307607",
"0.6307607",
"0.6307607",
"0.6307607",
"0.6307607",
"0.6301277",
"0.6301277",
... | 0.64447236 | 10 |
Casts self to string | def to_s
name
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_str\n self.to_s\n end",
"def to_s\n @string ||= Builder::ToString.new(self).string\n end",
"def to_str() end",
"def to_str() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",... | [
"0.7916055",
"0.75476277",
"0.75302124",
"0.75302124",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
"0.7501386",
... | 0.0 | -1 |
Nova instancia do CrediSIS | def initialize(campos = {})
campos = { aceite: 'N' }.merge!(campos)
super(campos)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n \n end",
"def create!\n end",
"def crear\n @quienes_somos = QuienesSomo.new\nend",
"def create\n \t\n end",
"def create\n # Submit de new, crea nueva instance en DB.\n end",
"def create\n end",
"def create\r\n end",
"def create\n \n end",
"def create\n ... | [
"0.7090506",
"0.70836556",
"0.69784",
"0.6916089",
"0.69148743",
"0.68777597",
"0.68412375",
"0.6820526",
"0.6804661",
"0.6778836",
"0.67730135",
"0.67730135",
"0.67730135",
"0.67730135",
"0.67604864",
"0.67604864",
"0.6750314",
"0.6702641",
"0.6702641",
"0.6702641",
"0.66979... | 0.0 | -1 |
Informacoes da conta corrente do cedente | def info_conta
# CAMPO TAMANHO
# agencia 4
# complemento 1
# conta corrente 8
# digito da conta 1
# complemento 6
"#{agencia} #{conta_corrente}#{digito_conta}#{''.rjust(6, ' ')}"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def info_conta\n # CAMPO TAMANHO\n # agencia 3\n # conta corrente 7\n \"#{agencia}#{conta_corrente}\"\n end",
"def info_conta\n # CAMPO TAMANHO\n # agencia 4\n # digito agencia 1\n # cont... | [
"0.681637",
"0.6731727",
"0.6693893",
"0.668763",
"0.65779316",
"0.6402519",
"0.6346293",
"0.6346293",
"0.6132345",
"0.6014266",
"0.5947765",
"0.5908351",
"0.577017",
"0.5710273",
"0.5710273",
"0.5704434",
"0.56807417",
"0.5605514",
"0.5605514",
"0.5574122",
"0.5574122",
"0... | 0.664624 | 4 |
::: :::::::: 06_stringCompression.rb :+: :+: :+: +:+ +:+ +:+ | def stringCompression(args)
i = 1;
arr = "";
count = 1;
len = args.length
while i < len + 1
if args[i - 1] == args[i]
count += 1
else
arr << args[i - 1] << count.to_s
count = 1
end
i += 1
end
arr.size > len ? args : arr
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def string_compression(str)\n\nend",
"def string_compression(string)\n count_char_hash = count_char(string)\n\n result = ''\n count_char_hash.each do |key, value|\n result += \"#{key}#{value}\"\n end\n if result.length == string.length\n string\n else\n result\n end\nend",
"def basic_compressio... | [
"0.88915604",
"0.79413134",
"0.78079724",
"0.7632892",
"0.7519092",
"0.742733",
"0.7404302",
"0.7368473",
"0.73505205",
"0.7345953",
"0.72694635",
"0.724083",
"0.71901494",
"0.7174862",
"0.7150533",
"0.7141393",
"0.7046321",
"0.70071596",
"0.6993195",
"0.69731563",
"0.6970497... | 0.7751684 | 3 |
From 5.0.0.4 Change Log " Fixed bug in MDTM command that potentially caused the daemon to crash." Nice way to play it down boys Connected to ftp2.rhinosoft.com. 220 ProFTPD 1.2.5rc1 Server (ftp2.rhinosoft.com) [62.116.5.74] Heh :) | def check
connect
disconnect
case banner
when /Serv-U FTP Server v4\.1/
print_status('Found version 4.1.0.3, exploitable')
return Exploit::CheckCode::Vulnerable
when /Serv-U FTP Server v5\.0/
print_status('Found version 5.0.0.0 (exploitable) or 5.0.0.4 (not), try it!');
return Exploit::CheckCode::Appears
when /Serv-U FTP Server v4\.0/
print_status('Found version 4.0.0.4 or 4.1.0.0, additional check.');
send_user(datastore['USER'])
send_pass(datastore['PASS'])
if (double_ff?())
print_status('Found version 4.0.0.4, exploitable');
return Exploit::CheckCode::Vulnerable
else
print_status('Found version 4.1.0.0, exploitable');
return Exploit::CheckCode::Vulnerable
end
when /Serv-U FTP Server/
print_status('Found an unknown version, try it!');
return Exploit::CheckCode::Detected
else
print_status('We could not recognize the server banner')
return Exploit::CheckCode::Safe
end
return Exploit::CheckCode::Safe
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ftp(cmd)\n\tSTDOUT.puts \"FTP: not implemented\"\nend",
"def ftp_get_from_system\r\n\r\n require 'net/ftp'\r\n require 'timeout'\r\n\r\n f = File.open(\"#{RAILS_ROOT}/log/ftp_transfer.log\", 'w')\r\n\r\n system_id =params[:id]\r\n finish = false\r\n ftpdest=\"\"\r\n\r\n @system = System.... | [
"0.6100476",
"0.59066",
"0.57811457",
"0.5670654",
"0.55447966",
"0.54944986",
"0.5481807",
"0.53227586",
"0.52938956",
"0.52788913",
"0.52787006",
"0.5213847",
"0.5208526",
"0.51955295",
"0.5138872",
"0.51355004",
"0.50960016",
"0.50735354",
"0.5041442",
"0.5030916",
"0.4987... | 0.0 | -1 |
Fingerprint a single host | def run_host(ip)
begin
connect
# The SMB 2 dialect must be there
dialects = ['PC NETWORK PROGRAM 1.0', 'LANMAN1.0', 'Windows for Workgroups 3.1a', 'LM1.2X002', 'LANMAN2.1', 'NT LM 0.12', 'SMB 2.002', 'SMB 2.???']
data = dialects.collect { |dialect| "\x02" + dialect + "\x00" }.join('')
pkt = Rex::Proto::SMB::Constants::SMB_NEG_PKT.make_struct
pkt['Payload']['SMB'].v['Command'] = Rex::Proto::SMB::Constants::SMB_COM_NEGOTIATE
pkt['Payload']['SMB'].v['Flags1'] = 0x18
pkt['Payload']['SMB'].v['Flags2'] = 0xc853
pkt['Payload'].v['Payload'] = data
pkt['Payload']['SMB'].v['ProcessID'] = rand(0x10000)
pkt['Payload']['SMB'].v['MultiplexID'] = rand(0x10000)
sock.put(pkt.to_s)
res = sock.get_once
if(res and res.index("\xfeSMB"))
if(res.length >= 124)
vers = res[72,2].unpack("CC").map{|c| c.to_s}.join(".")
ctime = Rex::Proto::SMB::Utils.time_smb_to_unix(*(res[108,8].unpack("VV").reverse))
btime = Rex::Proto::SMB::Utils.time_smb_to_unix(*(res[116,8].unpack("VV").reverse))
utime = ctime - btime
print_status("#{ip} supports SMB 2 [dialect #{vers}] and has been online for #{utime/3600} hours")
#Add Report
report_note(
:host => ip,
:proto => 'smb2',
:port => rport,
:type => "supports SMB 2 [dialect #{vers}]",
:data => "supports SMB 2 [dialect #{vers}] and has been online for #{utime/3600} hours"
)
else
print_status("#{ip} supports SMB 2.0")
#Add Report
report_note(
:host => ip,
:proto => 'smb2',
:port => rport,
:type => "supports SMB 2.0",
:data => "supports SMB 2.0"
)
end
end
rescue ::Rex::ConnectionError
rescue ::Exception => e
print_error("#{rhost}: #{e.class} #{e} #{e.backtrace}")
ensure
disconnect
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fingerprint\n digest = Digest::SHA256.digest(to_secret_uri)\n \"ni:///sha-256;#{Cryptor::Encoding.encode(digest)}\"\n end",
"def set_fingerprint\n @fingerprint = Fingerprint.find(params[:id])\n end",
"def fingerprint\n text('//fingerprint').upcase\n end",
"def run_h... | [
"0.64529425",
"0.6422759",
"0.6351779",
"0.6325476",
"0.62800384",
"0.60431033",
"0.60413575",
"0.5980085",
"0.59782165",
"0.5959005",
"0.5943916",
"0.5801319",
"0.5781926",
"0.57693565",
"0.57418793",
"0.5739732",
"0.5735871",
"0.5706376",
"0.5634991",
"0.56011266",
"0.55838... | 0.0 | -1 |
This is for shuffling images from one check to another. Suppose there are two checks, check1 and check2. Check1 has got 10 images and check2 has got 5 images.If we want to add two images form check1 to check2, then specify the check2, from image name & To image name. | def index
@batch = Batch.select("batches.id AS id,batches.batchid AS batchid,batches.date AS date").where("status IN ('#{BatchStatus::NEW}','#{BatchStatus::PROCESSING}')")
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def shuffle!(png = Random.new) \n @cards.shuffle!(random: png)\n end",
"def shuffle()\n \n end",
"def random_product_image_filename\n [\"img_1.png\",\"img_2.png\", \"img_3.png\", \"img_4.jpg\", \"img_5.jpg\", \n \"img_6.jpg\", \"img_7.jpg\", \"img_8.jpg\", \"img_9.jpg\", \"img_10.jp... | [
"0.6709633",
"0.6480716",
"0.6392719",
"0.62549424",
"0.61722124",
"0.6038652",
"0.6032964",
"0.60202634",
"0.5932114",
"0.5925841",
"0.59254825",
"0.59211063",
"0.5892894",
"0.58878267",
"0.58878267",
"0.5881429",
"0.58716553",
"0.58337206",
"0.5827049",
"0.581813",
"0.58115... | 0.0 | -1 |
Bind this exchange to another exchange | def bind(exchange, opts = {})
if exchange.respond_to?(:add_route)
# we can do the binding ourselves
exchange.add_route opts.fetch(:routing_key, @name), self
else
# we need the channel to look up the exchange
@channel.xchg_bind self, opts.fetch(:routing_key, @name), exchange
end
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bind(exchange, binding_key, arguments: {})\n @client.exchange_bind(@name, exchange, binding_key, arguments: arguments)\n self\n end",
"def bind_exchange_to_exchange(channel, exchange_in_name, queue_name, exchange_out_name)\n exchange_source = channel.fanout(exchange_in_name)\n queue_curr... | [
"0.71781206",
"0.71336573",
"0.7043615",
"0.6646767",
"0.65322006",
"0.6498141",
"0.6381982",
"0.6359548",
"0.62917846",
"0.6179219",
"0.61699784",
"0.61581975",
"0.6033768",
"0.5998082",
"0.59499586",
"0.59499586",
"0.59499586",
"0.59499586",
"0.59499586",
"0.59499586",
"0.5... | 0.72865677 | 0 |
Unbind this exchange from another exchange | def unbind(exchange, opts = {})
if exchange.respond_to?(:remove_route)
# we can do the unbinding ourselves
exchange.remove_route opts.fetch(:routing_key, @name), self
else
# we need the channel to look up the exchange
@channel.xchg_unbind opts.fetch(:routing_key, @name), exchange, self
end
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unbind(exchange, binding_key, arguments: {})\n @client.exchange_unbind(@name, exchange, binding_key, arguments: arguments)\n self\n end",
"def exchange_unbind(destination, source, binding_key, arguments: {})\n write_bytes FrameBytes.exchange_unbind(@id, destination, source, bindin... | [
"0.7703316",
"0.7237125",
"0.71587723",
"0.6961066",
"0.68821263",
"0.6861492",
"0.6764336",
"0.6673356",
"0.66535664",
"0.6626625",
"0.6578966",
"0.6499297",
"0.6467013",
"0.6464879",
"0.6449453",
"0.64352584",
"0.6416178",
"0.6394224",
"0.6356966",
"0.6333237",
"0.6322279",... | 0.76902455 | 1 |
Check if a queue is bound to this exchange | def routes_to?(exchange_or_queue, opts = {})
route = exchange_or_queue.respond_to?(:name) ? exchange_or_queue.name : exchange_or_queue
rk = opts.fetch(:routing_key, route)
@routes.key?(rk) && @routes[rk].any? { |r| r == exchange_or_queue }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def in_queue?\n !send(queue_position_column).nil?\n end",
"def in_queue?\n depq_or_subpriority().kind_of? Depq\n end",
"def exists?\n queue_exists?(@name)\n end",
"def pending_data?\n !pending_queues.empty?\n end",
"def busy?\n\n @queue.size ... | [
"0.73381853",
"0.72414804",
"0.7037419",
"0.67502725",
"0.6713161",
"0.66944987",
"0.6686837",
"0.6662362",
"0.65928096",
"0.65554553",
"0.65548813",
"0.65453625",
"0.654173",
"0.6522226",
"0.6519288",
"0.64599806",
"0.64294153",
"0.6422051",
"0.6415258",
"0.6403793",
"0.6403... | 0.6996047 | 3 |
Deliver a message to routes | def deliver(payload, opts, key)
# noOp
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def route!( message )\n new.route!( message )\n end",
"def send(message)\n if @subsystem_routes.has_key?(message.system_name)\n route_message(message, delivery_type: :send)\n else\n send_message(message)\n end\n end",
"def deliver_now\n m... | [
"0.6893355",
"0.66293144",
"0.6485044",
"0.6480171",
"0.63412684",
"0.6324151",
"0.62980586",
"0.62445265",
"0.62003857",
"0.6194757",
"0.61738497",
"0.61336535",
"0.60762674",
"0.607148",
"0.6034513",
"0.60276264",
"0.6012862",
"0.5973281",
"0.596857",
"0.5966217",
"0.596255... | 0.0 | -1 |
helper method makes a controller method available to all views or templates We'll now implement few controller methods that will help us utilize user authentication within our application. 384 385 We put those methods in the ApplicationController because we want them to be available in all of our controllers. We make the current_user and user_signed_in? helper methods by adding helper_method :current_user and helper_method user_signed_in? because we need to access those in the view files as well as controller files. | def user_signed_in?
current_user.present?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_values_application_controller\n inject_into_class \"app/controllers/application_controller.rb\", \"ApplicationController\" do\n \" helper_method :current_user \\n \" +\n \" after_filter :path \\n\"+\n \" private \\n\" +\n \" def current_user \\n\" +\n \" ... | [
"0.65544266",
"0.6334796",
"0.62593347",
"0.6190386",
"0.6190386",
"0.61529356",
"0.6058271",
"0.6004751",
"0.597755",
"0.5894633",
"0.5879097",
"0.58668256",
"0.5835614",
"0.5784787",
"0.5782208",
"0.5772552",
"0.5772552",
"0.57441294",
"0.57039654",
"0.5693984",
"0.5680123"... | 0.0 | -1 |
class BinarySearchTree class Node attr_reader :key, :left, :right def initialize(key) | def invert_binary_tree(tree)
return nil if tree.nil?
tmp = tree.left
tree.left = tree.right
tree.right = tmp
tree.left = invert_binary_tree(tree.left)
tree.right = invert_binary_tree(tree.right)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(key)\n @key = key\n @left = nil\n @right = nil\n end",
"def search( key, node=@root )\n return nil if node.nil?\n if key < node.key\n search( key, node.left )\n elsif key > node.key\n search( key, node.right )\n else\n return node\n end\n end",
"def initi... | [
"0.84512514",
"0.6797697",
"0.67571414",
"0.674536",
"0.66935796",
"0.66885316",
"0.6621655",
"0.6561767",
"0.65536135",
"0.6522148",
"0.6483349",
"0.6452241",
"0.6443701",
"0.6434393",
"0.6433383",
"0.6411736",
"0.6405719",
"0.6393871",
"0.6384927",
"0.6355042",
"0.6350265",... | 0.0 | -1 |
GET /service_masters GET /service_masters.json | def index
@service_master = ServiceMaster.new
@service_masters = ServiceMaster.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @masterservices = Masterservice.all\n end",
"def index\n @service_type_masters = ServiceTypeMaster.all\n end",
"def set_masterservice\n @masterservice = Masterservice.find(params[:id])\n end",
"def index\n @masters = Master.all\n end",
"def index\n @masters = Master.all.p... | [
"0.75391024",
"0.6979484",
"0.6897962",
"0.6563478",
"0.63139915",
"0.61581856",
"0.613824",
"0.6128529",
"0.61161554",
"0.6096253",
"0.60698426",
"0.6060726",
"0.60410464",
"0.6037837",
"0.6005063",
"0.5983018",
"0.5983018",
"0.5983018",
"0.5942045",
"0.591292",
"0.59108144"... | 0.7198971 | 1 |
GET /service_masters/1 GET /service_masters/1.json | def show
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @masterservices = Masterservice.all\n end",
"def index\n @service_master = ServiceMaster.new\n @service_masters = ServiceMaster.all\n end",
"def set_masterservice\n @masterservice = Masterservice.find(params[:id])\n end",
"def index\n @service_type_masters = ServiceTypeMaste... | [
"0.7312",
"0.718824",
"0.69798535",
"0.6818455",
"0.64241487",
"0.64175326",
"0.6327885",
"0.6327885",
"0.6327885",
"0.6271178",
"0.6250846",
"0.6232455",
"0.61645126",
"0.61451316",
"0.61120623",
"0.6026043",
"0.59659827",
"0.59624106",
"0.5944199",
"0.5924845",
"0.5918045",... | 0.0 | -1 |
POST /service_masters POST /service_masters.json | def create
@service_master = ServiceMaster.new(service_master_params)
@service_masters = ServiceMaster.all
respond_to do |format|
if @service_master.save
@service_master = ServiceMaster.new
format.js { @flag = true }
else
flash.now[:alert] = 'About Already Exist.'
format.js { @flag = false }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @masterservice = Masterservice.new(masterservice_params)\n\n respond_to do |format|\n if @masterservice.save\n format.html { redirect_to @masterservice, notice: 'Masterservice was successfully created.' }\n format.json { render :show, status: :created, location: @masterservice... | [
"0.7216409",
"0.67750895",
"0.6578927",
"0.64668584",
"0.6282189",
"0.62709135",
"0.620134",
"0.61889654",
"0.61861974",
"0.6155413",
"0.6090262",
"0.60795563",
"0.6051253",
"0.5990406",
"0.59378725",
"0.59124595",
"0.5886639",
"0.58829844",
"0.5861934",
"0.5859407",
"0.58578... | 0.60235316 | 13 |
PATCH/PUT /service_masters/1 PATCH/PUT /service_masters/1.json | def update
@service_master.update(service_master_params)
@service_master = ServiceMaster.new
@service_masters = ServiceMaster.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n respond_to do |format|\n if @masterservice.update(masterservice_params)\n format.html { redirect_to @masterservice, notice: 'Masterservice was successfully updated.' }\n format.json { render :show, status: :ok, location: @masterservice }\n else\n format.html { render ... | [
"0.68468153",
"0.672396",
"0.62835616",
"0.624663",
"0.62237656",
"0.62004936",
"0.6189609",
"0.6170119",
"0.61110705",
"0.61104804",
"0.60681194",
"0.60219634",
"0.60219634",
"0.5990539",
"0.5990539",
"0.59771204",
"0.5947153",
"0.592995",
"0.59294236",
"0.5920235",
"0.58997... | 0.70402193 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.