query stringlengths 7 9.55k | document stringlengths 10 363k | metadata dict | negatives listlengths 0 101 | negative_scores listlengths 0 101 | document_score stringlengths 3 10 | document_rank stringclasses 102
values |
|---|---|---|---|---|---|---|
This is a tad meh. We basically need to meta program our test methods as we'll want individual meths for each check so we get this easy to read in jenkins, but since we only know which lister to use once the program runs we'll have to extend ourselves lazily via class_eval which allows us to edit the class from within a class method. The ultimate result is a bunch of test_pkg_version methods. | def define_tests
@ours.each do |pkg|
their = @theirs.find { |x| x.name == pkg.name }
class_eval do
define_method("test_#{pkg.name}_#{pkg.version}") do
PackageVersionCheck.new(ours: pkg, theirs: their).run
end
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def define_tests\n Apt.update if Process.uid.zero? # update if root\n @lister.packages.each do |pkg|\n class_eval do\n define_method(\"test_#{pkg.name}_#{pkg.version}\") do\n PackageVersionCheck.new(pkg).run\n end\n end\n end\n end",
... | [
"0.79935706",
"0.6560548",
"0.6069621",
"0.59919196",
"0.5881773",
"0.5833435",
"0.57638437",
"0.5743722",
"0.57084703",
"0.5570949",
"0.5548126",
"0.5516116",
"0.5516116",
"0.5495896",
"0.54355466",
"0.54307795",
"0.54260075",
"0.54138124",
"0.5409251",
"0.53974664",
"0.5383... | 0.7360373 | 1 |
Added ERB parsing for the config file. Kinda of nice for more dynamic features that could be required. | def deserialize(config)
config.present? ? YAML.safe_load(ERB.new(config).result, [], [], false, content_path) : {}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_config_for apps\n\t\t\t\tr = ERB.new(erb_template).\n\t\t\t\t\tresult(erb_template_binding(apps, Nacreon::Config.domain,\n\t\t\t\t\t Nacreon::Config.nginx_port))\n\t\t\t\t# In case the gsub looks funny, it's there to allow both the\n\t\t\t\t# template and the output at least... | [
"0.6637162",
"0.6189307",
"0.6046174",
"0.6006817",
"0.60047555",
"0.59642124",
"0.59455884",
"0.59337044",
"0.5888536",
"0.5888536",
"0.58860826",
"0.58402175",
"0.582407",
"0.5771413",
"0.5754093",
"0.5748191",
"0.5736008",
"0.57303756",
"0.571367",
"0.56876326",
"0.5665059... | 0.0 | -1 |
Creates a new Parser. See also ::parse | def initialize
@tokens = []
@current_token = nil
@debug = false
@line = 0
@line_pos = 0
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parser\n @parser ||= Parser.new(self)\n end",
"def parser\n @parser ||= Parser.new(self)\n end",
"def parser\n Parser.new(self, :mode=>mode)\n end",
"def parser\n @parser ||= Parser.new(self)\n end",
"def initialize parser\n @parser = parser\n end",
"def pa... | [
"0.7445383",
"0.7445383",
"0.7397252",
"0.7329526",
"0.726806",
"0.72557133",
"0.67240715",
"0.67240715",
"0.67240715",
"0.67240715",
"0.66707337",
"0.6666743",
"0.6568896",
"0.6517118",
"0.6508107",
"0.64593154",
"0.6416704",
"0.6383697",
"0.6375915",
"0.6331114",
"0.6294418... | 0.0 | -1 |
Builds a Heading of +level+ | def build_heading level
heading = RDoc::Markup::Heading.new level, text
skip :NEWLINE
heading
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_heading level\n type, text, = get\n\n text = case type\n when :TEXT then\n skip :NEWLINE\n text\n else\n unget\n ''\n end\n\n RDoc::Markup::Heading.new level, text\n end",
"def build_heading level\n heading = sup... | [
"0.8417932",
"0.7992036",
"0.766331",
"0.72736394",
"0.7115193",
"0.7091424",
"0.70675504",
"0.68406266",
"0.6820763",
"0.67263156",
"0.6411727",
"0.64061326",
"0.6369527",
"0.6271758",
"0.6270927",
"0.6246594",
"0.62451506",
"0.61768943",
"0.61630785",
"0.61508495",
"0.61322... | 0.8584473 | 0 |
Builds a List flush to +margin+ | def build_list margin
p :list_start => margin if @debug
list = RDoc::Markup::List.new
until @tokens.empty? do
type, data, column, = get
case type
when :BULLET, :LABEL, :LALPHA, :NOTE, :NUMBER, :UALPHA then
list_type = type
if column < margin then
unget
break
end
if list.type and list.type != list_type then
unget
break
end
list.type = list_type
case type
when :NOTE, :LABEL then
_, indent, = get # SPACE
if :NEWLINE == peek_token.first then
get
peek_type, new_indent, peek_column, = peek_token
indent = new_indent if
peek_type == :INDENT and peek_column >= column
unget
end
else
data = nil
_, indent, = get
end
list_item = build_list_item(margin + indent, data)
list << list_item if list_item
else
unget
break
end
end
p :list_end => margin if @debug
return nil if list.empty?
list
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_list margin\n p :list_start => margin if @debug\n\n list = RDoc::Markup::List.new\n label = nil\n\n until @tokens.empty? do\n type, data, column, = get\n\n case type\n when *LIST_TOKENS then\n if column < margin || (list.type && list.type != type) then\n unget\n... | [
"0.7264258",
"0.6973062",
"0.6146951",
"0.6004018",
"0.5907188",
"0.5867355",
"0.5827932",
"0.5822188",
"0.5780602",
"0.5739241",
"0.56009376",
"0.5592616",
"0.5541733",
"0.5504484",
"0.54857427",
"0.54697794",
"0.5468899",
"0.54651374",
"0.5457361",
"0.5425007",
"0.5424739",... | 0.74732697 | 0 |
Builds a ListItem that is flush to +indent+ with type +item_type+ | def build_list_item indent, item_type = nil
p :list_item_start => [indent, item_type] if @debug
list_item = RDoc::Markup::ListItem.new item_type
until @tokens.empty? do
type, data, column = get
if column < indent and
not type == :NEWLINE and
(type != :INDENT or data < indent) then
unget
break
end
case type
when :INDENT then
unget
list_item.push(*parse(indent))
when :TEXT then
unget
list_item << build_paragraph(indent)
when :HEADER then
list_item << build_heading(data)
when :NEWLINE then
list_item << RDoc::Markup::BlankLine.new
when *LIST_TOKENS then
unget
list_item << build_list(column)
else
raise ParseError, "Unhandled token #{@current_token.inspect}"
end
end
p :list_item_end => [indent, item_type] if @debug
return nil if list_item.empty?
list_item.parts.shift if
RDoc::Markup::BlankLine === list_item.parts.first and
list_item.length > 1
list_item
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def accept_list_item_start list_item\n type = @list_type.last\n\n case type\n when :NOTE, :LABEL then\n bullets = Array(list_item.label).map do |label|\n attributes(label).strip\n end.join \"\\n\"\n\n bullets << \"\\n:\"\n\n @prefix = ' ' * @indent\n @indent += 4\n @pr... | [
"0.6534294",
"0.64756286",
"0.63982606",
"0.6332178",
"0.6300186",
"0.6280757",
"0.62803835",
"0.6182565",
"0.61383456",
"0.6061238",
"0.6061238",
"0.60265017",
"0.6007495",
"0.59631926",
"0.58516514",
"0.5837462",
"0.57548285",
"0.57481664",
"0.57481664",
"0.56823754",
"0.56... | 0.81515324 | 0 |
Builds a Paragraph that is flush to +margin+ | def build_paragraph margin
p :paragraph_start => margin if @debug
paragraph = RDoc::Markup::Paragraph.new
until @tokens.empty? do
type, data, column, = get
case type
when :INDENT then
next if data == margin and peek_token[0] == :TEXT
unget
break
when :TEXT then
if column != margin then
unget
break
end
paragraph << data
skip :NEWLINE
else
unget
break
end
end
p :paragraph_end => margin if @debug
paragraph
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_paragraph margin\n p :paragraph_start => margin if @debug\n\n paragraph = RDoc::Markup::Paragraph.new\n\n until @tokens.empty? do\n type, data, column, = get\n\n if type == :TEXT and column == margin then\n paragraph << data\n\n break if peek_token.first == :BREAK\n\n ... | [
"0.828294",
"0.7077012",
"0.6331948",
"0.6310281",
"0.63017106",
"0.62282985",
"0.6197009",
"0.6139939",
"0.61095005",
"0.60806936",
"0.6039721",
"0.60332686",
"0.5999385",
"0.59649974",
"0.5960489",
"0.59318346",
"0.5919193",
"0.5900154",
"0.5900154",
"0.5900154",
"0.5884583... | 0.8040784 | 1 |
Builds a Verbatim that is flush to +margin+ | def build_verbatim margin
p :verbatim_begin => margin if @debug
verbatim = RDoc::Markup::Verbatim.new
until @tokens.empty? do
type, data, column, = get
case type
when :INDENT then
if margin >= data then
unget
break
end
indent = data - margin
verbatim << ' ' * indent
when :HEADER then
verbatim << '=' * data
_, _, peek_column, = peek_token
peek_column ||= column + data
verbatim << ' ' * (peek_column - column - data)
when :RULE then
width = 2 + data
verbatim << '-' * width
_, _, peek_column, = peek_token
peek_column ||= column + data + 2
verbatim << ' ' * (peek_column - column - width)
when :TEXT then
verbatim << data
when *LIST_TOKENS then
if column <= margin then
unget
break
end
list_marker = case type
when :BULLET then '*'
when :LABEL then "[#{data}]"
when :LALPHA, :NUMBER, :UALPHA then "#{data}."
when :NOTE then "#{data}::"
end
verbatim << list_marker
_, data, = get
verbatim << ' ' * (data - list_marker.length)
when :NEWLINE then
verbatim << data
break unless [:INDENT, :NEWLINE].include? peek_token[0]
else
unget
break
end
end
verbatim.normalize
p :verbatim_end => margin if @debug
verbatim
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_verbatim margin\n p :verbatim_begin => margin if @debug\n verbatim = RDoc::Markup::Verbatim.new\n\n min_indent = nil\n generate_leading_spaces = true\n line = ''.dup\n\n until @tokens.empty? do\n type, data, column, = get\n\n if type == :NEWLINE then\n line << data\n ... | [
"0.78023046",
"0.71912265",
"0.61379147",
"0.61276263",
"0.58679205",
"0.57041734",
"0.5671685",
"0.5600203",
"0.5551709",
"0.5530641",
"0.55081743",
"0.5490407",
"0.5484892",
"0.5384266",
"0.5378512",
"0.53358114",
"0.5310175",
"0.52667063",
"0.5237795",
"0.52332526",
"0.522... | 0.7935075 | 0 |
Pulls the next token from the stream. | def get
@current_token = @tokens.shift
p :get => @current_token if @debug
@current_token
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def next\n\t\tif @next_token\n\t\t\ttoken = @next_token\n\t\t\t@next_token = nil\n\t\t\treturn token\n\t\telse\n\t\t\ttoken = read_token\n\t\t\treturn token\n\t\tend\n\tend",
"def next\n token = next_token\n token = next_token while token&.empty?\n token\n end",
"def get_token\n @tokenbuf ... | [
"0.80374205",
"0.7607829",
"0.7580852",
"0.7458197",
"0.7431326",
"0.7334023",
"0.7309964",
"0.7282369",
"0.72726494",
"0.72726494",
"0.720337",
"0.7139261",
"0.7130224",
"0.7061388",
"0.69352496",
"0.69222224",
"0.68793917",
"0.6832228",
"0.68298364",
"0.68203497",
"0.678653... | 0.6346298 | 33 |
Parses the tokens into a Document | def parse indent = 0
p :parse_start => indent if @debug
document = []
until @tokens.empty? do
type, data, column, = get
if type != :INDENT and column < indent then
unget
break
end
case type
when :HEADER then
document << build_heading(data)
when :INDENT then
if indent > data then
unget
break
elsif indent == data then
next
end
unget
document << build_verbatim(indent)
when :NEWLINE then
document << RDoc::Markup::BlankLine.new
skip :NEWLINE, false
when :RULE then
document << RDoc::Markup::Rule.new(data)
skip :NEWLINE
when :TEXT then
unget
document << build_paragraph(indent)
# we're done with this paragraph (indent mismatch)
break if peek_token[0] == :TEXT
when *LIST_TOKENS then
unget
list = build_list(indent)
document << list if list
# we're done with this list (indent mismatch)
break if LIST_TOKENS.include? peek_token.first and indent > 0
else
type, data, column, line = @current_token
raise ParseError,
"Unhandled token #{type} (#{data.inspect}) at #{line}:#{column}"
end
end
p :parse_end => indent if @debug
document
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse(doc)\n parser = parser_class.new\n parser.run(doc)\n end",
"def parse(tokenizer)\n @doc = REXML::Document.new\n @pos = @doc\n while node=tokenizer.next\n append(node)\n end\n end",
"def parse_doc\n @parse_started = true\n nil while @lex... | [
"0.653267",
"0.65123236",
"0.6499569",
"0.6481074",
"0.60657245",
"0.6042443",
"0.5962066",
"0.5953052",
"0.59506637",
"0.5946258",
"0.59429085",
"0.59429085",
"0.59322566",
"0.5908728",
"0.5899579",
"0.58761257",
"0.5867523",
"0.5834811",
"0.5768888",
"0.5768888",
"0.5766613... | 0.6449262 | 4 |
Returns the next token on the stream without modifying the stream | def peek_token
token = @tokens.first || []
p :peek => token if @debug
token
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def next\n\t\tif @next_token\n\t\t\ttoken = @next_token\n\t\t\t@next_token = nil\n\t\t\treturn token\n\t\telse\n\t\t\ttoken = read_token\n\t\t\treturn token\n\t\tend\n\tend",
"def next\n token = next_token\n token = next_token while token&.empty?\n token\n end",
"def next_token\n tokens.sh... | [
"0.8332876",
"0.80467963",
"0.7955034",
"0.7954132",
"0.79513747",
"0.7866707",
"0.7755758",
"0.7742991",
"0.76905787",
"0.7653205",
"0.76486",
"0.7551597",
"0.75150716",
"0.73619646",
"0.7355825",
"0.72566706",
"0.72470903",
"0.72424835",
"0.7221907",
"0.71813124",
"0.714040... | 0.6509945 | 47 |
Skips a token of +token_type+, optionally raising an error. | def skip token_type, error = true
type, data, = get
return unless type # end of stream
return @current_token if token_type == type
unget
raise ParseError, "expected #{token_type} got #{@current_token.inspect}" if
error
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def skip token_type, error = true\n type, = get\n return unless type # end of stream\n return @current_token if token_type == type\n unget\n raise ParseError, \"expected #{token_type} got #{@current_token.inspect}\" if error\n end",
"def skip_token(nocov_token = T.unsafe(nil)); end",
"def consu... | [
"0.8023859",
"0.57336855",
"0.5639974",
"0.55563164",
"0.546961",
"0.5462235",
"0.53580624",
"0.51876",
"0.5172979",
"0.5165891",
"0.51409113",
"0.5130443",
"0.5073701",
"0.5073701",
"0.5043363",
"0.50416476",
"0.50350356",
"0.49987578",
"0.49729186",
"0.49450994",
"0.4929742... | 0.7890535 | 1 |
Consumes tokens until NEWLINE and turns them back into text | def text
text = ''
loop do
type, data, = get
text << case type
when :BULLET then
_, space, = get # SPACE
"*#{' ' * (space - 1)}"
when :LABEL then
_, space, = get # SPACE
"[#{data}]#{' ' * (space - data.length - 2)}"
when :LALPHA, :NUMBER, :UALPHA then
_, space, = get # SPACE
"#{data}.#{' ' * (space - 2)}"
when :NOTE then
_, space = get # SPACE
"#{data}::#{' ' * (space - data.length - 2)}"
when :TEXT then
data
when :NEWLINE then
unget
break
when nil then
break
else
raise ParseError, "unhandled token #{@current_token.inspect}"
end
end
text
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tokenize ; end",
"def tokenize ; end",
"def tokenize\n \n end",
"def tokenize; end",
"def tokenize; end",
"def remove_leading_newlines\n @tokens.shift if @tokens[0][0] == \"\\n\"\n end",
"def text_token(text, kind); end",
"def text_token(text, kind); end",
"def do__raw token\r\n... | [
"0.6738388",
"0.6738388",
"0.66725445",
"0.66034937",
"0.66034937",
"0.64914036",
"0.63945186",
"0.63945186",
"0.63693213",
"0.6359327",
"0.62989026",
"0.6160446",
"0.60512424",
"0.6021501",
"0.6021501",
"0.60016537",
"0.5986833",
"0.5967097",
"0.5902801",
"0.58961946",
"0.58... | 0.5282788 | 93 |
Calculates the column and line of the current token based on +offset+. | def token_pos offset
[offset - @line_pos, @line]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def token_pos(byte_offset) # :nodoc:\n [byte_offset - @line_pos, @line]\n end",
"def offset_on_line(offset)\n end",
"def pos_on_line(offset)\n end",
"def line_for_offset(offset)\n end",
"def get_line_and_column_from_chunk(offset)\n if offset.zero?\n return [@chunk_line, @chunk_column]\n e... | [
"0.7708019",
"0.75606894",
"0.750005",
"0.7395713",
"0.7213591",
"0.71804374",
"0.71804374",
"0.7128925",
"0.6958157",
"0.6950198",
"0.688341",
"0.68291587",
"0.68017244",
"0.67835814",
"0.6753973",
"0.6700305",
"0.6678214",
"0.65833074",
"0.6564007",
"0.6564007",
"0.65455973... | 0.82562107 | 0 |
Turns text +input+ into a stream of tokens | def tokenize input
s = StringScanner.new input
@line = 0
@line_pos = 0
until s.eos? do
pos = s.pos
@tokens << case
when s.scan(/\r?\n/) then
token = [:NEWLINE, s.matched, *token_pos(pos)]
@line_pos = s.pos
@line += 1
token
when s.scan(/ +/) then
[:INDENT, s.matched_size, *token_pos(pos)]
when s.scan(/(=+)\s*/) then
level = s[1].length
level = 6 if level > 6
@tokens << [:HEADER, level, *token_pos(pos)]
pos = s.pos
s.scan(/.*/)
[:TEXT, s.matched, *token_pos(pos)]
when s.scan(/^(-{3,}) *$/) then
[:RULE, s[1].length - 2, *token_pos(pos)]
when s.scan(/([*-])\s+/) then
@tokens << [:BULLET, :BULLET, *token_pos(pos)]
[:SPACE, s.matched_size, *token_pos(pos)]
when s.scan(/([a-z]|\d+)\.[ \t]+\S/i) then
list_label = s[1]
width = s.matched_size - 1
s.pos -= 1 # unget \S
list_type = case list_label
when /[a-z]/ then :LALPHA
when /[A-Z]/ then :UALPHA
when /\d/ then :NUMBER
else
raise ParseError, "BUG token #{list_label}"
end
@tokens << [list_type, list_label, *token_pos(pos)]
[:SPACE, width, *token_pos(pos)]
when s.scan(/\[(.*?)\]( +|$)/) then
@tokens << [:LABEL, s[1], *token_pos(pos)]
[:SPACE, s.matched_size, *token_pos(pos)]
when s.scan(/(.*?)::( +|$)/) then
@tokens << [:NOTE, s[1], *token_pos(pos)]
[:SPACE, s.matched_size, *token_pos(pos)]
else s.scan(/.*/)
[:TEXT, s.matched, *token_pos(pos)]
end
end
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tokens\n @tokens ||= scanner.tokenize(input)\n end",
"def process(input_stream)\n debug 'Beginning tokenization of input'\n\n @stream = input_stream\n @stream_char = 1\n\n @output = [] if @state == :root\n\n until @stream.strip.empty?\n tk = tokenize\n @output.append(tk) if t... | [
"0.7559099",
"0.73922145",
"0.73162776",
"0.7213815",
"0.7203481",
"0.71423316",
"0.71423316",
"0.69663167",
"0.6958177",
"0.6958177",
"0.6918021",
"0.6918021",
"0.68163633",
"0.6773894",
"0.6629035",
"0.66260797",
"0.65720826",
"0.65361017",
"0.6473569",
"0.6473569",
"0.6435... | 0.66031986 | 16 |
Returns the current token or +token+ to the token stream | def unget token = @current_token
p :unget => token if @debug
raise Error, 'too many #ungets' if token == @tokens.first
@tokens.unshift token if token
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def current_token\n @stream.current_token\n end",
"def get_token\n @tokenbuf << read_token if @tokenbuf.length == 0\n return @tokenbuf.shift\n end",
"def current_token\n @current_token\n end",
"def current_token\n @tokens[@token_index]\n end",
"def token\n @token\n... | [
"0.81498164",
"0.7948311",
"0.766473",
"0.76168036",
"0.75251657",
"0.7453073",
"0.7432192",
"0.74250376",
"0.7336681",
"0.7336681",
"0.7268622",
"0.7250319",
"0.7189824",
"0.71562195",
"0.7152385",
"0.70746887",
"0.70031375",
"0.6998607",
"0.694673",
"0.6931011",
"0.69286066... | 0.0 | -1 |
:callseq: ratio + obj > new_obj Performs a symbolic sum between a symbolic rational and `obj`, returning a new symbolic object if the operation creates a new tree branch, or a symbolic number if `obj` is a SymDesc::Number. Simplification is automatic. If b is not a symbolic object, a conversion is attempted | def +(b)
b = b.symdescfy
case b
when Infinity
b
when Number
__sum_number b
when BinaryOp
b + self
else
super
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def +(obj)\n return nil unless (self.top) || (self == obj) || (self =~ obj)\n return self - obj.val if obj.is_a? Negative\n return Prod.new(Number.new(2),self) if self == obj\n lft = (obj.is_a? Prod) ? (self.left + obj.left) : (self.left + Number.new(1))\n return Prod.new(lft,self.right) i... | [
"0.5679484",
"0.5571299",
"0.5512486",
"0.5476463",
"0.5472499",
"0.54054844",
"0.53911597",
"0.5385869",
"0.53632283",
"0.5280221",
"0.522032",
"0.51102436",
"0.5104627",
"0.5098812",
"0.50042707",
"0.49911633",
"0.49592084",
"0.49476534",
"0.4887508",
"0.48865855",
"0.48839... | 0.51447797 | 11 |
:callseq: ratio obj > new_obj Performs a symbolic subtraction between a symbolic rational and `obj`, returning a new symbolic object if the operation creates a new tree branch, or a symbolic number if `obj` is a SymDesc::Number. Simplification is automatic. If b is not a symbolic object, a conversion is attempted | def -(b)
b = b.symdescfy
case b
when Infinity
-b
when Number
__sub_number b
when BinaryOp
__sub_binary_op b
when Neg
self + b.argument
else
super
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def -(obj)\n return nil unless (self.top) || (self == obj) || (self =~ obj)\n return self + obj.val if obj.is_a? Negative\n return Number.new 0 if self == obj\n lft = ((self =~ obj) && (obj.is_a? Prod)) ? (self.left - obj.left) : (self.left - Number.new(1))\n return Prod.new(lft,self.right... | [
"0.5755978",
"0.5724076",
"0.5541179",
"0.5344188",
"0.5329774",
"0.5329774",
"0.53283256",
"0.531676",
"0.53081554",
"0.5223408",
"0.51430726",
"0.49782917",
"0.4977166",
"0.48819825",
"0.4876806",
"0.487545",
"0.4857826",
"0.48467252",
"0.48393816",
"0.4826154",
"0.4767695"... | 0.4609807 | 35 |
:callseq: ratio obj > new_obj Performs a symbolic power between a symbolic rational and `obj`, returning a new symbolic object if the operation creates a new tree branch, or a symbolic number if `obj` is a SymDesc::Number. Simplification is automatic. If b is not a symbolic object, a conversion is attempted | def **(b)
b = b.symdescfy
case b
when Infinity
b
when Int
return Ratio.new(@numerator ** b.value, @denominator ** b.value)
else
super
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def /(object)\n #multiplica numerador con numerador y denominador con denominador\n Fraccion.new(@numerador*object.denominador,@denominador*object.numerador)\n end",
"def test_Rational_InstanceMethods_to_r\n\t\tassert_equal(Rational(\"2\"), Rational(\"4/2\").to_r)\n\tend",
"def coerce(other)\n... | [
"0.5513748",
"0.5395761",
"0.5352221",
"0.52719855",
"0.5176099",
"0.5094608",
"0.5078279",
"0.505973",
"0.50355375",
"0.49988765",
"0.49968788",
"0.4973092",
"0.49586266",
"0.4952022",
"0.49476475",
"0.493178",
"0.49262136",
"0.49208266",
"0.49112248",
"0.49032268",
"0.48592... | 0.55113226 | 1 |
:callseq: ratio == obj > true or false Returns true only if the `obj` is a SymDesc::Ratio or a Float or a Rational and it represents the same numeric value | def ==(b)
case b
when Rational, Ratio
(@numerator == b.numerator) &&
(@denominator == b.denominator)
when Float
self == b.symdescfy
else
false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def eql? r\n if r.is_a? Ratio\n return (self[0] == r[0] && self[1] == r[1])\n elsif r.is_a? Fixnum\n return (self.to_f == r)\n else\n raise TypeError.new(\"Tried to compare a #{r.class} to an HD::Ratio\")\n end\n end",
"def ==(rational)\n end",
"def =~(obj)\n r... | [
"0.6922043",
"0.63706243",
"0.6257363",
"0.6167372",
"0.6077013",
"0.5956101",
"0.58988214",
"0.5849982",
"0.582913",
"0.5806711",
"0.58051425",
"0.5799605",
"0.5765875",
"0.57607144",
"0.56601644",
"0.565542",
"0.565542",
"0.56261075",
"0.5595477",
"0.55935556",
"0.5589719",... | 0.6561627 | 1 |
:callseq: to_s > string to_s(str_io) > str_io If no argument is provided, it returns a string representation of the fraction. If a StringIO object is passed, the string representation is appended to the buffer and the buffer is returned. | def to_s(io = nil)
if io
__io_append(io, @numerator, DIV_ID, @denominator)
else
return "#{@numerator}/#{@denominator}"
end
io
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_rational_to_s\n assert_respond_to(@rat1, :to_s)\n assert_nothing_raised{ @rat1.to_s }\n assert_kind_of(String, @rat1.to_s)\n assert_equal(\"3/4\", @rat1.to_s)\n assert_equal(\"3/4\", @rat7.to_s)\n end",
"def to_s\n return \"#{fraction.to_s}\"\n end",
"def mixed_fract... | [
"0.62411296",
"0.6067417",
"0.6048017",
"0.56619906",
"0.5567879",
"0.5535255",
"0.55082595",
"0.5502486",
"0.54220146",
"0.533182",
"0.533182",
"0.522742",
"0.5187833",
"0.5176933",
"0.5155773",
"0.5132985",
"0.51256114",
"0.50557464",
"0.5048989",
"0.50249916",
"0.50249916"... | 0.71509975 | 0 |
Get the normalized value for the key. If localization is in play the current locale will be appended to the key in MongoDB dot notation. FIXME (Did). This version DOES NOT USE ::I18n.locale directly. See the localized.rb file for more explanation. | def normalized_key(name, serializer)
# serializer && serializer.localized? ? "#{name}.#{::I18n.locale}" : name
serializer&.localized? ? "#{name}.#{::Mongoid::Fields::I18n.locale}" : name
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def normalized_key(name, serializer)\n serializer && serializer.localized? ? \"#{name}.#{::I18n.locale}\" : name\n end",
"def lookup(locale, key, scope = [], separator = nil)\n return unless key\n keys = I18n.send(:normalize_translation_keys, locale, key, scope)\n @moneta[keys.join('.')]\n end"... | [
"0.72772455",
"0.65642506",
"0.62767637",
"0.6186129",
"0.60458326",
"0.6041448",
"0.6041448",
"0.6026203",
"0.6001171",
"0.5980327",
"0.5969849",
"0.59576917",
"0.59545887",
"0.58488494",
"0.5821653",
"0.58135754",
"0.5752824",
"0.57344896",
"0.5678363",
"0.5650589",
"0.5646... | 0.7464962 | 0 |
COM interface method implementations | def Execute(*args)
case args[0]
when CmdAOT
OnAOT(*args)
when CmdItem1
OnItem1(*args)
when CmdButton1
OnButton1(*args)
end
S_OK
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def interface; end",
"def interface; end",
"def invoke\r\n # TODO: rename to more appropriate one 2007/05/10 by shino\r\n raise 'must be implemented in subclasses'\r\n end",
"def implementation; end",
"def implementation; end",
"def interface_methods; end",
"def interface=(_arg0); en... | [
"0.68481",
"0.68481",
"0.65005314",
"0.6432503",
"0.6432503",
"0.63621706",
"0.6340731",
"0.63237387",
"0.6208223",
"0.6166697",
"0.6145106",
"0.61411536",
"0.61411536",
"0.61411536",
"0.61411536",
"0.6103626",
"0.61031777",
"0.61031777",
"0.61031777",
"0.61031777",
"0.609236... | 0.0 | -1 |
COM interface method implementations | def OnCreateUICommand(*args)
uich.QueryInterface(uich.class::IID, args[-1])
S_OK
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def interface; end",
"def interface; end",
"def invoke\r\n # TODO: rename to more appropriate one 2007/05/10 by shino\r\n raise 'must be implemented in subclasses'\r\n end",
"def implementation; end",
"def implementation; end",
"def interface_methods; end",
"def interface=(_arg0); en... | [
"0.68481",
"0.68481",
"0.65005314",
"0.6432503",
"0.6432503",
"0.63621706",
"0.6340731",
"0.63237387",
"0.6208223",
"0.6166697",
"0.6145106",
"0.61411536",
"0.61411536",
"0.61411536",
"0.61411536",
"0.6103626",
"0.61031777",
"0.61031777",
"0.61031777",
"0.61031777",
"0.609236... | 0.0 | -1 |
General handler utility functions | def buildSubmissionVariables(data)
# Populates the handler's @variables hash from a submission payload.
# data - Hash with a 'submission' key containing the submission fields.
# Scalar fields go under @variables['submission']; the nested 'form'
# object is delegated to buildFormVariables; 'values' is copied wholesale.
data = data['submission']
data.keys.each do |key|
# Build Up Submission Properties (non hash / arrays)
if !data[key].is_a?(Hash) && !data[key].is_a?(Array)
@variables['submission'][key] = data[key]
end
# Pass Form Object to the Build Form Variables Routine to Handle the Rest
if key == "form"
buildFormVariables({"form" => data[key]})
end
# Build Submission Values Variables
if key == "values"
@variables['values'] = data[key]
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def handler; end",
"def handler; end",
"def handlers; end",
"def handlers; end",
"def handlers; end",
"def handler_method; end",
"def handlers=(_arg0); end",
"def handle; end",
"def handle\n end",
"def handler_base_class; end",
"def errorhandling\n end",
"def setup_handler\n end",
"def h... | [
"0.78852904",
"0.78852904",
"0.7699488",
"0.7699488",
"0.7699488",
"0.7279557",
"0.70928675",
"0.67347205",
"0.63554645",
"0.63363624",
"0.6306352",
"0.6291296",
"0.6282299",
"0.6282299",
"0.6280245",
"0.61602783",
"0.61415493",
"0.6130153",
"0.6130153",
"0.6130153",
"0.61183... | 0.0 | -1 |
This is a template method that is used to escape results values (returned in execute) that would cause the XML to be invalid. This method is not necessary if values do not contain character that have special meaning in XML (&, ", ), however it is a good practice to use it for all return variable results in case the value could include one of those characters in the future. This method can be copied and reused between handlers. | def escape(string)
# XML-escapes &, ", > and < in the given value using the ESCAPE_CHARACTERS
# lookup table (defined elsewhere in the handler). Returns nil when
# `string` is nil/false (trailing `if string` guard).
# Globally replace characters based on the ESCAPE_CHARACTERS constant
string.to_s.gsub(/[&"><]/) { |special| ESCAPE_CHARACTERS[special] } if string
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def escape_xml(value)\n value.to_s.gsub(/[&<>\"']/) { |s| ESCAPE_TABLE[s] } # or /[&<>\"']/\n end",
"def xml_escape(input); end",
"def escape(val)\n return val\n end",
"def escape(value)\n end",
"def xml_escape(input)\n return input.to_s.to_xs\n end",
"def test_ou... | [
"0.6790408",
"0.673146",
"0.6600858",
"0.63993174",
"0.6328012",
"0.62743497",
"0.6160782",
"0.6022911",
"0.5983455",
"0.5983152",
"0.59736365",
"0.58558583",
"0.5832975",
"0.58071023",
"0.57079893",
"0.57021266",
"0.5694263",
"0.5678588",
"0.566588",
"0.55844843",
"0.5570339... | 0.0 | -1 |
convert TeX into PDF | def tex_compile(file)
# Compiles `file`.tex (base name, no extension) in the current directory
# into a PDF: platex -> bibtex -> platex x3 -> dvipdfmx.
# Progress is reported as dots on stdout. Relies on the globals $bibtex
# and $dev_null being set elsewhere in the script.
print File.basename(Dir.pwd) + '/' + file
`platex -kanji="sjis" #{file}.tex`
print "."
`#{$bibtex} #{file}`
print "."
# Some systems need three compile passes for references to settle.
3.times do
`platex -kanji="sjis" #{file}.tex`
print "."
end
`dvipdfmx #{file}.dvi > #{$dev_null} 2>&1`
puts "."
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_pdf\n\t\trequire 'erb'\n\t\trequire 'fileutils'\n @self = self\n template = ERB.new(LatexTemplate.where(model: self.class.to_s).first.template)\n directory = \"#{Rails.root}/tmp/pdf/#{self.class.to_s}/\"\n\t\tif not File.directory? directory\n\t\t\tFileUtils.mkpath(directory)\n\t\tend\n\t\tfile = \... | [
"0.7339175",
"0.73134685",
"0.6811696",
"0.67833686",
"0.67824274",
"0.6647562",
"0.6545204",
"0.6417368",
"0.64163077",
"0.6391119",
"0.63190246",
"0.6300978",
"0.6270027",
"0.623456",
"0.6213866",
"0.62063766",
"0.6203311",
"0.6200837",
"0.61946106",
"0.61626494",
"0.615203... | 0.0 | -1 |
compile all TeX files in dir | def compile(dir)
# Rebuilds every TeX document in `dir` (one per existing *.pdf), then
# builds the combined nzmath_doc document with the shared header/footer
# files temporarily stashed and replaced by empty placeholders.
# The originals are always restored, intermediates cleaned and the
# starting directory restored via the method-level ensure.
savedir = Dir.pwd
Dir.chdir(dir)
# Remove intermediate files.
clean
# Compile each individual document (base name = pdf name without '.pdf').
Dir::glob("*.pdf").each do |file|
next if "nzmath_doc" == file[0...-4]
tex_compile(file[0...-4])
end
header_footer = ["header_overview.tex", "header_basic_util.tex", "header_class.tex",
"header_function.tex", "footer.tex"]
header_footer.map!{|file| '../' + file}
# Stash the real header/footer files by renaming them aside.
header_footer.each do |file|
File::rename(file, file + '_')
end
# Create empty placeholder files in their place.
header_footer.each do |file|
open(file, "w") {|f|}
end
begin
# Compile the combined document.
tex_compile("nzmath_doc")
ensure
# Rename the originals back into place.
header_footer.each do |file|
File::rename(file + '_', file)
end
end
ensure
# Always clean intermediates and return to the starting directory.
clean
Dir.chdir(savedir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tex_compile(file)\r\n\tprint File.basename(Dir.pwd) + '/' + file\r\n\t`platex -kanji=\"sjis\" #{file}.tex`\r\n\tprint \".\"\r\n\t`#{$bibtex} #{file}`\r\n\tprint \".\"\r\n\r\n\t# some system needs three times compiles.\r\n\t3.times do\r\n\t\t`platex -kanji=\"sjis\" #{file}.tex`\r\n\t\tprint \".\"\r\n\tend\r\n\r... | [
"0.6738461",
"0.6415091",
"0.6218515",
"0.6178845",
"0.6080331",
"0.5979561",
"0.5928755",
"0.5855256",
"0.5852079",
"0.5778334",
"0.57682055",
"0.5739974",
"0.56925184",
"0.56823117",
"0.5682132",
"0.5673344",
"0.5659808",
"0.5652723",
"0.56107134",
"0.557006",
"0.5547858",
... | 0.7556378 | 0 |
Set the user of the gas simulation | def user
# Convenience reader: the owning user, reached through the associated
# full_simulation record.
self.full_simulation.user
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_User(value)\n set_input(\"User\", value)\n end",
"def set_User(value)\n set_input(\"User\", value)\n end",
"def set_User(value)\n set_input(\"User\", value)\n end",
"def set_User(value)\n set_input(\"User\", value)\n end",
"def set_User(value)\n ... | [
"0.70538545",
"0.70538545",
"0.70538545",
"0.70538545",
"0.70538545",
"0.70538545",
"0.70538545",
"0.7052837",
"0.69139683",
"0.68261945",
"0.6655212",
"0.6655212",
"0.65319496",
"0.65124714",
"0.6509996",
"0.64686495",
"0.64686495",
"0.6377614",
"0.636727",
"0.6352112",
"0.6... | 0.6078279 | 69 |
This method can estimate the consumption depending on the params you give to it | def estimation(yearly_cost, yearly_consumption, floor_space, heat_type, water_cooking_type, nb_residents )
# Estimates yearly gas consumption when the client did not provide one.
# yearly_cost        - cost per year (required, coerced to Float).
# yearly_consumption - per year; 0 means "estimate it for me".
# floor_space        - surface, drives the heating component.
# heat_type / water_cooking_type - 'Gaz' enables the matching component.
# nb_residents       - drives the hot-water/cooking component.
# Returns [yearly_cost, yearly_consumption] on success, or [false, -1]
# when the inputs are insufficient (see verify_nilness_params).
yearly_cost = yearly_cost.to_f
yearly_consumption = yearly_consumption.to_i
floor_space = floor_space.to_i
nb_residents = nb_residents.to_i
if verify_nilness_params(yearly_cost, yearly_consumption, floor_space, heat_type, water_cooking_type, nb_residents)
first_factor = heat_type == 'Gaz' ? 1 : 0
second_factor = water_cooking_type == 'Gaz' ? 1 : 0
# Heating: floor_space * 100 (per-m2 heuristic — TODO confirm units);
# hot water/cooking: table lookup by household size.
yearly_consumption = floor_space * 100 * first_factor + consumption_people(nb_residents) * second_factor if yearly_consumption.zero?
[yearly_cost, yearly_consumption]
else
[false, -1]
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def effective_rate; end",
"def capacity_to_demand_multiplier\n 1.0\n end",
"def capacity_to_demand_multiplier\n 8760.0\n end",
"def rate_scale; end",
"def estimated_consumption(cr)\n total = 0\n # If real consumption equals zero, try to estimate\n if cr == 0\n # Only e... | [
"0.6671547",
"0.653203",
"0.62655646",
"0.625655",
"0.6216866",
"0.6146086",
"0.604413",
"0.60081655",
"0.59809095",
"0.5969978",
"0.59527034",
"0.59379196",
"0.5934625",
"0.5927867",
"0.5905628",
"0.58848727",
"0.5879764",
"0.58713216",
"0.5864117",
"0.5857021",
"0.58331126"... | 0.55513304 | 62 |
This method execute the comparison between what is entered by the client and the contracts | def comparison(yearly_cost, yearly_consumption)
# Compares the client's current yearly cost against all gas contracts.
# Keeps contracts whose consumption band covers yearly_consumption
# (band limits are multiplied by 1000 — presumably stored in MWh; TODO
# confirm) and whose total yearly price (kWh price * consumption + 12
# monthly subscriptions) beats yearly_cost.
# Returns [best_saving_rounded, matching_contracts, savings_per_contract].
first_filter = GasContract.all.select { |contract|
yearly_consumption.between?(contract.low_kw_consumption_per_year * 1000, contract.high_kw_consumption_per_year * 1000)
}
second_filter = first_filter.select{ |contract|
yearly_cost > (contract.kwh_price_base * yearly_consumption + contract.subscription_base_price_month * 12)
}
max_save = 0
all_savings = []
# Track the best saving and collect the saving for every kept contract
# (all_savings is parallel to second_filter).
second_filter.each do |contract|
savings = yearly_cost - (contract.kwh_price_base * yearly_consumption + contract.subscription_base_price_month * 12)
if savings > max_save
max_save = savings
end
all_savings << savings
end
[max_save.round(2), second_filter, all_savings]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def contract; end",
"def contract; end",
"def compare_contractor_data(contractor)\n message_good = \"Each attribute value is equal to the corresponding attributes of a freelancer #{contractor[:name]} on a profile page:\\n#{contractor}\"\n message_bad = \"Freelancer attribute values not equal to the cor... | [
"0.5922322",
"0.5922322",
"0.58432645",
"0.56738245",
"0.5638366",
"0.5567748",
"0.55164385",
"0.5511663",
"0.5450405",
"0.5410191",
"0.5304728",
"0.52622634",
"0.5238766",
"0.5228185",
"0.51697916",
"0.51577604",
"0.51554716",
"0.5134517",
"0.5097427",
"0.50964075",
"0.50934... | 0.0 | -1 |
This method create all the join table given by the filter and the saving associated with each | def create_join_table_gas(filter, all_savings)
# Persists one join row per retained contract, storing the saving computed
# for it. `filter` and `all_savings` are parallel arrays (see #comparison).
filter.each_with_index do |contract, index|
JoinTableGasSimulationContract.create(gas_simulation: self, gas_contract: contract, savings: all_savings[index])
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_table_joins(klass)\n if join_tables = klass.ann(:self, :join_tables)\n for info in join_tables\n begin\n # UGGLY hack!\n key_type = klass.ann(:oid, :sql).split(\" \").first\n create_join_table_sql(info, key_type).each do |sql|\n exec(sql, false)\n ... | [
"0.6556048",
"0.6534441",
"0.6457511",
"0.6235183",
"0.6114938",
"0.6044283",
"0.5942229",
"0.5807729",
"0.58055997",
"0.5781895",
"0.57558334",
"0.5721014",
"0.571776",
"0.5715381",
"0.5669762",
"0.566083",
"0.5571541",
"0.55399054",
"0.55355793",
"0.55269796",
"0.5503767",
... | 0.65796465 | 0 |
This method can show the top best contracts depending on the number we want to show | def sort_contracts(how_many)
# Returns up to `how_many` GasContract records, best savings first.
# Join rows are sorted by savings descending; when fewer contracts exist
# than requested, the inline `rescue` swallows the lookup error and the
# partial array is kept.
return_array = []
contracts_sorted = join_table_gas_simulation_contracts.sort_by(&:savings).reverse
how_many.times do |i|
return_array << GasContract.find(contracts_sorted[i].gas_contract_id)
rescue
return_array
end
return_array
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def money_top(driver_sum)\n arr_money = driver_sum.map { |driver| driver[:total_money]}\n money_max = arr_money.max\n money_name = \"\"\n\n driver_sum.each do |item|\n if item[:total_money] == money_max\n money_name << item[:id] << \" \"\n end\n end\n\n money_result = [money_max, money_name]\n r... | [
"0.6581193",
"0.6335129",
"0.6146729",
"0.6145433",
"0.61236113",
"0.6091667",
"0.606354",
"0.6054245",
"0.60077995",
"0.5996436",
"0.59670955",
"0.5957648",
"0.5940633",
"0.5919165",
"0.5895838",
"0.5876413",
"0.5863333",
"0.5850707",
"0.5848036",
"0.58316904",
"0.5827998",
... | 0.5441949 | 73 |
Estimate the consumption per habitant | def consumption_people(nb_residents)
# Yearly hot-water/cooking gas consumption estimate by household size.
# The lookup table covers 1-5 residents; beyond 5 it extrapolates at
# +1000 per extra resident on top of the 5-resident figure.
hash = { 1 => 1630, 2 => 2945, 3 => 4265, 4 => 5320, 5 => 6360 }
if hash[nb_residents].nil?
hash[5] + (nb_residents - 5) * 1000
else
hash[nb_residents]
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fuel_consumption(tank)\n return self.mass_flow_rate / tank.fuel_density\n end",
"def fuel_efficiency\n ((mileage - predecessor.mileage) / liter).round(1) if predecessor\n end",
"def profit_calculation\r\n @total_profit = @cocktail_profit + @beer_profit + @water_profit\r\n end",
... | [
"0.64738995",
"0.6460504",
"0.64576095",
"0.6407664",
"0.6324953",
"0.6310428",
"0.6309668",
"0.63042414",
"0.6296991",
"0.62953824",
"0.62562126",
"0.6215982",
"0.6200238",
"0.61958694",
"0.6178777",
"0.61774266",
"0.6176287",
"0.6160859",
"0.6142263",
"0.6128977",
"0.61065"... | 0.0 | -1 |
This method is part of the estimation process It verifies the entries of the client | def verify_nilness_params(yearly_cost, yearly_consumption, floor_space, heat_type, water_cooking_type, nb_residents)
# Validates the estimation inputs. yearly_cost must be non-zero; when
# yearly_consumption is 0 (i.e. has to be estimated), floor_space,
# nb_residents, heat_type and water_cooking_type must all be provided.
# Returns true when the inputs are sufficient, false otherwise.
if yearly_cost.zero? # if he forgot the yearly cost
false
else
if yearly_consumption.zero? # if the consumption is not entered, all the other field must be present
if [floor_space, nb_residents].include?(0) || [heat_type, water_cooking_type].include?('')
false
else
true
end
else
true
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def verify\n end",
"def verify\n # nothing to do here, so just return\n end",
"def verify\n create_order\n start_challenge\n wait_verify_status\n check_verify_status\n rescue Acme::Client::Error => e\n retry_on_verify_error(e)\n end",
"def fetch_and_val... | [
"0.6227038",
"0.6166866",
"0.6136668",
"0.6095083",
"0.60388434",
"0.5999311",
"0.58063453",
"0.58043236",
"0.57649106",
"0.5720362",
"0.56577307",
"0.5614118",
"0.55964535",
"0.55964535",
"0.5582783",
"0.55827284",
"0.55825675",
"0.556687",
"0.5527957",
"0.5522332",
"0.55095... | 0.0 | -1 |
Return the URL to the user's profile banner image | def profile_banner_uri(size=:web)
# HTTP (insecure) banner URI for the requested size (default :web), built
# by joining the stored URL with the size segment; nil when no banner.
::URI.parse(insecure_uri([@attrs[:profile_banner_url], size].join('/'))) if @attrs[:profile_banner_url]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def profile_url\n if self.profile and self.profile != \"\"\n return self.profile\n end\n return \"/blank.png\"\n end",
"def get_image_url( account_profile ) \n account_profile['profile_image_url']\n end",
"def user_profile_img(user)\n if user.avatar?\n return image_tag(user.avatar... | [
"0.8004414",
"0.7848266",
"0.773669",
"0.763777",
"0.7636071",
"0.7571114",
"0.74976575",
"0.7484197",
"0.7441429",
"0.7433081",
"0.74250734",
"0.739835",
"0.7378658",
"0.73764014",
"0.7341379",
"0.733608",
"0.7283216",
"0.725527",
"0.7251441",
"0.72503185",
"0.7247855",
"0... | 0.7316583 | 16 |
Return the secure URL to the user's profile banner image | def profile_banner_uri_https(size=:web)
# HTTPS banner URI for the requested size (default :web); nil when the
# user has no banner set.
::URI.parse([@attrs[:profile_banner_url], size].join('/')) if @attrs[:profile_banner_url]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def profile_banner_url(size=:web)\n insecure_url([@attrs[:profile_banner_url], size].join('/')) if profile_banner_url?\n end",
"def profile_url\n if self.profile and self.profile != \"\"\n return self.profile\n end\n return \"/blank.png\"\n end",
"def profile_image_url(size=:normal)\n ... | [
"0.78130656",
"0.76481104",
"0.75924057",
"0.73992205",
"0.73567855",
"0.7302048",
"0.7253386",
"0.72063124",
"0.7148167",
"0.7110586",
"0.710684",
"0.710684",
"0.70610666",
"0.69632035",
"0.6952374",
"0.6947484",
"0.6927653",
"0.6922039",
"0.69173527",
"0.6890155",
"0.687565... | 0.685851 | 21 |
Return the URL to the user's profile image | def profile_image_uri(size=:normal)
# HTTP (insecure) profile image URI for the requested size (default
# :normal), derived from the HTTPS variant; nil when the attribute is absent.
::URI.parse(insecure_uri(profile_image_uri_https(size))) if @attrs[:profile_image_url_https]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def profile_url\n if self.profile and self.profile != \"\"\n return self.profile\n end\n return \"/blank.png\"\n end",
"def image_url\n url_for(object.profile_pic)\n end",
"def get_image_url( account_profile ) \n account_profile['profile_image_url']\n end",
"def user_profile_img(use... | [
"0.83991086",
"0.8366859",
"0.8138298",
"0.8138133",
"0.80715215",
"0.8066858",
"0.8061989",
"0.8019302",
"0.8017834",
"0.7945696",
"0.794209",
"0.7894318",
"0.7863307",
"0.78624856",
"0.78569233",
"0.7840354",
"0.78331876",
"0.78296447",
"0.78228533",
"0.7821407",
"0.7804746... | 0.7244905 | 76 |
Return the secure URL to the user's profile image | def profile_image_uri_https(size=:normal)
# HTTPS profile image URI, produced by rewriting the size suffix of the
# stored URL via PROFILE_IMAGE_SUFFIX_REGEX; nil when the attribute is absent.
# The profile image URL comes in looking like like this:
# https://a0.twimg.com/profile_images/1759857427/image1326743606_normal.png
# It can be converted to any of the following sizes:
# https://a0.twimg.com/profile_images/1759857427/image1326743606.png
# https://a0.twimg.com/profile_images/1759857427/image1326743606_mini.png
# https://a0.twimg.com/profile_images/1759857427/image1326743606_bigger.png
::URI.parse(@attrs[:profile_image_url_https].sub(PROFILE_IMAGE_SUFFIX_REGEX, profile_image_suffix(size))) if @attrs[:profile_image_url_https]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def profile_image_url(size=:normal)\n insecure_url(profile_image_url_https(size)) if profile_image_url?\n end",
"def profile_url\n if self.profile and self.profile != \"\"\n return self.profile\n end\n return \"/blank.png\"\n end",
"def image_url\n url_for(object.profile_pic)\n end",... | [
"0.81058383",
"0.7958155",
"0.78643423",
"0.7643549",
"0.75786215",
"0.7572952",
"0.75723356",
"0.75454056",
"0.74461323",
"0.74282527",
"0.74064827",
"0.74060076",
"0.74060076",
"0.7391989",
"0.73644364",
"0.7363571",
"0.7362",
"0.73546225",
"0.7346148",
"0.7340833",
"0.7338... | 0.6887495 | 62 |
Determines whether the NFA accepts a given string. | def accepts?(input)
# Runs `input` through the automaton (#feed) and returns the :accept flag
# of the result hash.
resp = feed(input)
resp[:accept]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def isNFA?\n if isValid? then \n if @alphabet.include? \"\" then\n return true\n else\n combo = []\n @transition_rules.each do |transition|\n combo.push([transition.current_state, transition.input_char])\n ... | [
"0.6675932",
"0.6577142",
"0.6505087",
"0.64102805",
"0.63964826",
"0.6392033",
"0.63919264",
"0.62675434",
"0.6210856",
"0.6210753",
"0.615116",
"0.6150965",
"0.6089369",
"0.6027092",
"0.60260725",
"0.6019783",
"0.60065895",
"0.60019",
"0.6001774",
"0.5995471",
"0.59788066",... | 0.0 | -1 |
Determines whether or not any transition states exist given a beginning state and input symbol pair. | def has_transition?(state, symbol)
# True when at least one transition is defined from `state` on `symbol`.
# Bails out early when the state has no outgoing transitions at all.
return false unless @transitions.include? state
@transitions[state].has_key? symbol
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def accepts_zero_characters(start_state, end_state)\n marked_states = Set.new\n state_stack = [start_state]\n while !state_stack.empty?\n state = state_stack.pop\n next if marked_states.include? state.id\n marked_states.add(state.id)\n return true if state.id == end_state.id\n sta... | [
"0.6617021",
"0.65058094",
"0.6217322",
"0.62026346",
"0.60841686",
"0.60547996",
"0.59882087",
"0.5873334",
"0.58653444",
"0.58478713",
"0.5809739",
"0.57889265",
"0.5787568",
"0.5771664",
"0.5749269",
"0.5711251",
"0.56665385",
"0.56665385",
"0.5664258",
"0.5635463",
"0.562... | 0.7356532 | 0 |
Determines if a given state is an accept state. | def accept_state?(state)
# Simple membership test against the automaton's accept-state set.
@accept.include? state
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_accept_state?(state)\n @accept.include? state.to_s\n end",
"def accepted?\n state == 'accepted'\n end",
"def can_accept?\n !accepted? && !rejected? && !withdrawn?\n end",
"def accepting?\n\treturn self.type == \"Accepting\"\n end",
"def accepted?\n return (self.status == ... | [
"0.88380766",
"0.79399586",
"0.68606234",
"0.67599756",
"0.65416485",
"0.64303",
"0.63275045",
"0.6324492",
"0.63241726",
"0.6242839",
"0.6232949",
"0.621452",
"0.621452",
"0.6202336",
"0.61447644",
"0.60632914",
"0.60215425",
"0.5997094",
"0.5952792",
"0.59490377",
"0.593947... | 0.87743056 | 1 |
override with a better message | def apply_summary
# One-line human-readable summary of this action: names the target file
# (basename when @file is set, generic placeholder otherwise) and appends
# the applied status.
file = @file ? File.basename(@file) : 'SIMP server <host>.yaml'
"Disabling of duplicate OS & SIMP YUM repos in #{file} #{@applied_status}"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def msg; end",
"def msg; end",
"def msg; end",
"def msg=(_); end",
"def msg=(_); end",
"def message\n @message || super\n end",
"def default_messages=(_arg0); end",
"def message; end",
"def message; end",
"def message; end",
"def message; end",
"def message; end",
"def message; end"... | [
"0.7049173",
"0.7049173",
"0.7049173",
"0.7004298",
"0.7004298",
"0.6855491",
"0.67363703",
"0.66996366",
"0.66996366",
"0.66996366",
"0.66996366",
"0.66996366",
"0.66996366",
"0.66343576",
"0.65666604",
"0.6540973",
"0.65294886",
"0.6514811",
"0.65086156",
"0.65086156",
"0.6... | 0.0 | -1 |
Pass on certain authorize_params to the Slack authorization GET request. See | def authorize_params
# Copies whitelisted request params (scope, team, redirect_uri) into the
# OAuth authorize params so they are forwarded to Slack's authorization
# URL; logs the final params at debug level.
super.tap do |params|
%w(scope team redirect_uri).each do |v|
if !request.params[v].to_s.empty?
params[v.to_sym] = request.params[v]
end
end
log(:debug, "Authorize_params #{params.to_h}")
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def authorize_request\n if basic_auth.values.all?\n [:basic_auth, @username, @password]\n else\n [:desk_oauth, oauth]\n end\n end",
"def authorize\n params[:access_token] ||= params[:oauth_token]\n super\n end",
"def authorize_params\n super.tap do |params|\n ... | [
"0.7459845",
"0.71612453",
"0.71542764",
"0.715173",
"0.70455563",
"0.6966972",
"0.680541",
"0.67030513",
"0.67030513",
"0.6687164",
"0.6647343",
"0.656474",
"0.64518327",
"0.6423493",
"0.64209443",
"0.6316849",
"0.6271404",
"0.6212525",
"0.62081057",
"0.61979944",
"0.6179658... | 0.7246134 | 1 |
Get a new OAuth2::Client and define custom behavior. overrides previous omniauthstrategiesoauth2 :client definition. Log API requests with OmniAuth.logger | def client
# Builds the OAuth2::Client via super, then:
# * points it at the team-specific subdomain (<team>.slack.com) when a
#   team_domain is supplied via request param or strategy option;
# * wraps #request on this one instance so every API call is logged and
#   each parsed response is stashed in raw_info, keyed by the trailing
#   path segment of the request URL.
new_client = super
team_domain = request.params['team_domain'] || options[:team_domain]
if !team_domain.to_s.empty?
site_uri = URI.parse(options[:client_options]['site'])
site_uri.host = "#{team_domain}.slack.com"
new_client.site = site_uri.to_s
log(:debug, "Oauth site uri with custom team_domain #{site_uri}")
end
# Capture raw_info in a local so the singleton method below closes over it.
st_raw_info = raw_info
new_client.define_singleton_method(:request) do |*args|
OmniAuth.logger.send(:debug, "(slack) API request #{args[0..1]}; in thread #{Thread.current.object_id}.")
request_output = super(*args)
uri = args[1].to_s.gsub(/^.*\/([^\/]+)/, '\1') # use single-quote or double-back-slash for replacement.
st_raw_info[uri.to_s]= request_output
request_output
end
new_client
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def client\n # Merge in authorize url if supplied\n options.authorize_params[:clientId] = options.client_id\n options.authorize_params[:redirect] = callback_url\n options.client_options[:authorize_url] = options.authorize_url if options.authorize_url.present?\n options.client_opt... | [
"0.7071465",
"0.6915986",
"0.6905785",
"0.6855409",
"0.67066187",
"0.6700351",
"0.669656",
"0.66850513",
"0.6615154",
"0.65906507",
"0.65702647",
"0.65653515",
"0.64839405",
"0.64443237",
"0.6426523",
"0.63746375",
"0.63389695",
"0.63172853",
"0.6254279",
"0.6240883",
"0.6229... | 0.63190436 | 17 |
Dropping query_string from callback_url prevents some errors in call to /api/oauth.v2.access. | def callback_url
# Callback URL built WITHOUT the query string — including it can break
# the token exchange against /api/oauth.v2.access.
full_host + script_name + callback_path
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def callback_url\n if @authorization_code_from_signed_request_in_cookie\n ''\n else\n # Fixes regression in omniauth-oauth2 v1.4.0 by https://github.com/intridea/omniauth-oauth2/commit/85fdbe117c2a4400d001a6368cc359d88f40abc7\n options[:callback_url] || (full_host + script_... | [
"0.70621866",
"0.6864417",
"0.6837393",
"0.6717086",
"0.6713367",
"0.6649983",
"0.647477",
"0.647477",
"0.6409368",
"0.63627714",
"0.6346695",
"0.632082",
"0.6275508",
"0.62429696",
"0.62285316",
"0.62277853",
"0.6184288",
"0.6184288",
"0.6184288",
"0.61700106",
"0.616855",
... | 0.57241046 | 56 |
Get a mutex specific to the calling method. This operation is synchronized with its own mutex. | def semaphore(method_name = caller[0][/`([^']*)'/, 1])
# Lazily creates and returns a Mutex dedicated to `method_name`, which
# defaults to the caller's method name parsed from the call stack.
# Creation is guarded by @main_semaphore so two threads cannot race to
# build the same per-method mutex.
@main_semaphore.synchronize {
@semaphores[method_name] ||= Mutex.new
}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mutex\n return @__mutex if @__mutex\n synchronized(self.class) {\n # check again: by the time we get into this synchronized block\n # some other thread might have already created the mutex.\n @__mutex = @__mutex || Mutex.new\n }\n end",
"def mutex\n @mutex\n end",
"def mute... | [
"0.69130665",
"0.6821464",
"0.66776854",
"0.55062217",
"0.55062217",
"0.55062217",
"0.55062217",
"0.5275682",
"0.5149322",
"0.51008916",
"0.5064437",
"0.5064437",
"0.5058152",
"0.49977008",
"0.49760187",
"0.49593088",
"0.4954641",
"0.49224377",
"0.49222884",
"0.49150985",
"0.... | 0.5997835 | 3 |
Preload additional api calls with a pool of threads. | def preload_data_with_threads(num_threads)
# Warms the per-method caches by invoking each active data method (plus
# any :additional_data keys) from a pool of `num_threads` worker threads.
# Workers pop method names from a Queue until it is empty — pop(true)
# raises ThreadError on an empty queue, which ends the worker.
# No-op when num_threads <= 0.
return unless num_threads > 0
preload_methods = active_methods.concat(options[:additional_data].keys)
log :info, "Preloading (#{preload_methods.size}) data requests using (#{num_threads}) threads."
work_q = Queue.new
preload_methods.each{|x| work_q.push x }
workers = num_threads.to_i.times.map do
Thread.new do
begin
while x = work_q.pop(true)
log :debug, "Preloading #{x}."
send x
end
rescue ThreadError
end
end
end
workers.map(&:join); "ok"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fetch_apis_asynchronously \n threads = []\n threads << Thread.new { @resp1 = RestClient.get 'https://reqres.in/api/unknown' }\n threads << Thread.new { @resp2 = RestClient.get 'https://reqres.in/api/products' }\n threads.each { |thr| thr.join } \n end",
"de... | [
"0.6453118",
"0.6229834",
"0.6211408",
"0.5797853",
"0.5561706",
"0.5507962",
"0.55022454",
"0.54906505",
"0.5468577",
"0.54551",
"0.54029375",
"0.53926635",
"0.53891283",
"0.53809327",
"0.5373321",
"0.53664947",
"0.5329434",
"0.5303737",
"0.53003293",
"0.5297891",
"0.5297891... | 0.72898734 | 0 |
Define methods for addional data from :additional_data option | def define_additional_data
# Defines a memoized singleton reader for every :additional_data entry.
# Callable values are invoked with the request env on first access;
# plain values are returned as-is. Results are cached in @<key>.
hash = options[:additional_data]
if !hash.to_h.empty?
hash.each do |k,v|
define_singleton_method(k) do
instance_variable_get(:"@#{k}") ||
instance_variable_set(:"@#{k}", v.respond_to?(:call) ? v.call(env) : v)
end
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_extended_data(type, data); end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = valu... | [
"0.7356935",
"0.72789276",
"0.72592163",
"0.7225805",
"0.7225805",
"0.7225805",
"0.7225805",
"0.7225805",
"0.7225805",
"0.7225805",
"0.7225805",
"0.7172823",
"0.7172823",
"0.71564835",
"0.71564835",
"0.7143333",
"0.7143333",
"0.7143333",
"0.7129505",
"0.7129505",
"0.7129505",... | 0.7723744 | 0 |
Parsed data returned from /slack/oauth.v2.access api call. | def auth
# Memoized hash of the access-token params merged with the token string
# itself under 'token'.
@auth ||= access_token.params.to_h.merge({'token' => access_token.token})
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def auth_test(access_token:)\n response = HTTParty.post(\"https://slack.com/api/auth.test\", headers: { 'Authorization': \"Bearer #{access_token}\" })\n JSON.parse(response.body, symbolize_names: true)\n end",
"def get_acc_info\n JSON.parse(curl_get(\"/api2/account/info/\").body_str)\n end",
"d... | [
"0.63821006",
"0.6263636",
"0.6097347",
"0.6082701",
"0.60452414",
"0.6038126",
"0.59176666",
"0.5915798",
"0.5896134",
"0.5895652",
"0.58949816",
"0.5854907",
"0.5822499",
"0.5799759",
"0.57996255",
"0.5776106",
"0.57636863",
"0.5726551",
"0.57002944",
"0.5683768",
"0.568060... | 0.0 | -1 |
API call to get user permissions for workspace token. This is needed because workspace token 'signinwithslack' is missing scopes in the :scope field (acknowledged issue in developer preview). Returns [: ] | def apps_permissions_users_list
# Fetches /api/apps.permissions.users.list — only for workspace-app
# tokens, when info is wanted and the call isn't excluded; returns {}
# otherwise. Memoizes the parsed 'resources' indexed by user id.
# Synchronized because preloading may run in multiple threads.
return {} unless !skip_info? && is_app_token? && is_not_excluded?
semaphore.synchronize {
@apps_permissions_users_list_raw ||= access_token.get('/api/apps.permissions.users.list')
@apps_permissions_users_list ||= @apps_permissions_users_list_raw.parsed['resources'].inject({}){|h,i| h[i['id']] = i; h}
}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def findProjectPermissions(workspace, user)\n query_result = @rally.find(:project_permission, :fetch => true, :pagesize => 100) {\n equal :\"user.login_name\", user.login_name\n }\n \n projectPermissions = []\n query_result.each { |pp|\n if ( pp.project.workspace == workspace)\n pro... | [
"0.6519009",
"0.59344774",
"0.5825381",
"0.5773829",
"0.57732075",
"0.56472045",
"0.55737215",
"0.55067277",
"0.5487154",
"0.5486564",
"0.54475594",
"0.54267025",
"0.54189837",
"0.53312325",
"0.5323245",
"0.53118074",
"0.5244398",
"0.5183052",
"0.51570153",
"0.5150469",
"0.51... | 0.473925 | 59 |
Is this a workspace app token? | def is_app_token?
# True when the oauth response reports token_type == 'app'.
auth['token_type'].to_s == 'app'
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_app_token?\n case\n when params['token_type'] == 'app' || token.to_s[/^xoxa/]\n true\n when token.to_s[/^xoxp/]\n false\n else\n nil\n end\n end",
"def token_is_for_master?\n token_account['name'] == 'mas... | [
"0.771789",
"0.7274672",
"0.72172874",
"0.71566266",
"0.7029004",
"0.6997016",
"0.6992928",
"0.6986241",
"0.69260824",
"0.6862502",
"0.6833186",
"0.68061197",
"0.68061197",
"0.6802385",
"0.6777616",
"0.67638683",
"0.6733154",
"0.6713921",
"0.67084825",
"0.66970694",
"0.668565... | 0.80090237 | 0 |
Scopes come from at least 3 different places now. The classic :scope field (string) New workshop token :scopes field (hash) Separate call to apps.permissions.users.list (array) This returns hash of workspace scopes, with classic & new identity scopes in :identity. Lists of scopes are in array form. | def all_scopes
# Memoized scope map: the classic :scope string (or, when that is empty,
# the user's scopes from apps.permissions.users.list) split into an array
# under 'identity', merged with the workspace-token :scopes hash.
@all_scopes ||=
{'identity' => (auth['scope'] || apps_permissions_users_list[user_id].to_h['scopes'].to_a.join(',')).to_s.split(',')}
.merge(auth['scopes'].to_h)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def all_scopes(_user_id=nil)\n debug{\"_user_id: #{_user_id}, @all_scopes: #{@all_scopes}\"}\n if _user_id && !@all_scopes.to_h.has_key?('identity') || @all_scopes.nil?\n @all_scopes = (\n scopes = case\n when params['scope']\n {'classic' =>... | [
"0.6774902",
"0.67735314",
"0.67023057",
"0.66939336",
"0.66305935",
"0.64769953",
"0.64670205",
"0.6418563",
"0.6394966",
"0.6382104",
"0.6324182",
"0.63023543",
"0.6266658",
"0.6254284",
"0.6243248",
"0.6169198",
"0.6147463",
"0.60793805",
"0.6078875",
"0.6004791",
"0.59846... | 0.74947 | 0 |
Determine if given scopes exist in current authorization. Scopes is hash where key == scope type val == array or string of individual scopes. | def has_scope?(**scopes_hash)
# Truthy when any of the given scopes is present in the current
# authorization. Keys are scope sections (e.g. identity:); values are a
# comma-separated String or an Array of scope names. Returns the first
# matching [section, scopes] pair (via #detect), nil when none match.
# Raises when a value is neither String nor Array.
scopes_hash.detect do |section, scopes|
test_scopes = case
when scopes.is_a?(String); scopes.split(',')
when scopes.is_a?(Array); scopes
else raise "Scope must be a string or array"
end
test_scopes.detect do |scope|
all_scopes[section.to_s].to_a.include?(scope.to_s)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def has_any_scope?(*scopes)\n return true if scopes.empty?\n Array(scopes.flatten).any?{ |scope| self.scopes.include?(scope) }\n end",
"def has_scope?(*scopes)\n return true if scopes.empty?\n Array(scopes.flatten).none?{ |scope| !self.scopes.include?(scope) }\n end",
"def acceptable?(scopes)\n ... | [
"0.7910931",
"0.7870426",
"0.76238036",
"0.75419444",
"0.7368402",
"0.7138306",
"0.7138306",
"0.7121337",
"0.70674014",
"0.6865917",
"0.6765733",
"0.6726917",
"0.6719299",
"0.662583",
"0.6595154",
"0.6591353",
"0.6482237",
"0.6383847",
"0.62659955",
"0.62583",
"0.61807144",
... | 0.8136706 | 0 |
helper methods go here! | def get_item(item)
# Linear scan of @cache for the first [key, value] pair whose value
# equals `item`; returns nil when no match is found.
@cache.each do |(k,v)|
return [k, v] if v == item
end
nil
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def private; end",
"def schubert; end",
"def probers; end",
"def suivre; end",
"def weber; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def who_we_are\r\n end",
"def custom; end",
"def custom; end",
"def helpers; end",
"def helpers; end",
"def help... | [
"0.7287261",
"0.66014576",
"0.6562698",
"0.6280189",
"0.6234601",
"0.62198305",
"0.62198305",
"0.62198305",
"0.62198305",
"0.61928034",
"0.61432976",
"0.61432976",
"0.6137897",
"0.6137897",
"0.6137897",
"0.6112013",
"0.6112013",
"0.6097403",
"0.60813224",
"0.6008863",
"0.5990... | 0.0 | -1 |
some metaprogramming to simplify adding new fields | def method_missing(sym, *args, &block)
# Dynamic accessors backed by @hash for every name in @@fields:
# supports `foo` / `foo=` plus the `[]` / `[]=` forms (where the first
# argument becomes the field name). Unknown names fall through to super
# (NoMethodError).
str = sym.to_s
matches = nil
# Normalize the indexer forms: obj[:field] -> "field", obj[:field]= -> "field=".
if str == "[]"
str = args.shift
elsif str == "[]="
str = "#{ args.shift }="
end
if @@fields.include?(str)
@hash[str]
elsif matches = str.match(/(.*)=$/) and @@fields.include?(matches[1])
# Writer form: strip the trailing '=' and assign into the backing hash.
@hash[matches[1]] = args.first
else
super(sym, *args, &block)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_fields(fields)\n\t\tend",
"def computed_fields; end",
"def method_missing(meth, *args, &blk)\n super unless Fields.all.include?(meth)\n field meth, *args\n end",
"def add_field(field)\n\t\tend",
"def update_fields(fields)\n\n # Also consider extracting this common code between ve... | [
"0.70557165",
"0.6788116",
"0.6718048",
"0.6670186",
"0.6460089",
"0.6455317",
"0.64046526",
"0.63169235",
"0.62844354",
"0.6280383",
"0.6267154",
"0.6267154",
"0.6267154",
"0.6246747",
"0.62184966",
"0.6150756",
"0.6150756",
"0.6150756",
"0.61325437",
"0.6117017",
"0.6097721... | 0.0 | -1 |
method_missing approach won't work with id do it manually | def id
@hash["id"]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def method_missing(id, *attr, &block); end",
"def method_missing(method_id, object)\n return object(object)\n end",
"def method_missing(meth, *args, &blk) \n\t\traise NoMethodError if (meth.to_s == 'id')\n\t\t\t\n\t\tself.bien[0].send(meth, *args, &blk)\n\t\t\n\t\trescue NoMethodError\n\t\t\tsuper\n\te... | [
"0.79756457",
"0.73780227",
"0.72981566",
"0.72128934",
"0.71858305",
"0.71588576",
"0.7133411",
"0.71270055",
"0.71002245",
"0.71002245",
"0.70505065",
"0.7048576",
"0.7006543",
"0.7006543",
"0.7006543",
"0.6991585",
"0.6961754",
"0.6961754",
"0.695989",
"0.69531775",
"0.694... | 0.0 | -1 |
define the name that a user will see, this method may be deprecated as the display name in PAT comes from the name field in measure.xml | def name
return 'AedgSmallToMediumOfficeRoofConstruction'
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def display_name \n username\n end",
"def display_name\n username\n end",
"def name\r\n\t\t@usr_name\r\n\tend",
"def name\r\n\t\t@usr_name\r\n\tend",
"def user_display_name=(value)\n @user_display_name = value\n end",
"def user_display_name=(value)\n @user... | [
"0.7340131",
"0.73166376",
"0.7264374",
"0.7264374",
"0.7244726",
"0.7244726",
"0.7244726",
"0.7229244",
"0.72220355",
"0.72023845",
"0.71597034",
"0.715259",
"0.71449405",
"0.7139302",
"0.7093882",
"0.708272",
"0.7072226",
"0.7061353",
"0.70334023",
"0.702734",
"0.70249605",... | 0.0 | -1 |
define the arguments that the user will input | def arguments(model)
args = OpenStudio::Measure::OSArgumentVector.new
# make an argument for material and installation cost
material_cost_insulation_increase_ip = OpenStudio::Measure::OSArgument.makeDoubleArgument('material_cost_insulation_increase_ip', true)
material_cost_insulation_increase_ip.setDisplayName('Increase Cost per Area of Construction Where Insulation was Improved ($/ft^2).')
material_cost_insulation_increase_ip.setDefaultValue(0.0)
args << material_cost_insulation_increase_ip
# make an argument for material and installation cost
material_cost_sri_increase_ip = OpenStudio::Measure::OSArgument.makeDoubleArgument('material_cost_sri_increase_ip', true)
material_cost_sri_increase_ip.setDisplayName('Increase Cost per Area of Construction Where Solar Reflectance Index (SRI) was Improved. ($/ft^2).')
material_cost_sri_increase_ip.setDefaultValue(0.0)
args << material_cost_sri_increase_ip
return args
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def arguments; end",
"def arguments; end",
"def arguments; end",
"def arguments\n \"\"\n end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end",
"def args; end... | [
"0.73753476",
"0.73753476",
"0.73753476",
"0.70890766",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",
"0.7008301",... | 0.0 | -1 |
define what happens when the measure is run | def run(model, runner, user_arguments)
super(model, runner, user_arguments)
# use the built-in error checking
if !runner.validateUserArguments(arguments(model), user_arguments)
return false
end
# assign the user inputs to variables
material_cost_insulation_increase_ip = runner.getDoubleArgumentValue('material_cost_insulation_increase_ip', user_arguments)
material_cost_sri_increase_ip = runner.getDoubleArgumentValue('material_cost_sri_increase_ip', user_arguments)
# no validation needed for cost inputs, negative values are fine, however negative would be odd choice since this measure only improves vs. decreases insulation and SRI performance
# global variables for costs
expected_life = 25
years_until_costs_start = 0
material_cost_insulation_increase_si = OpenStudio.convert(material_cost_insulation_increase_ip, '1/ft^2', '1/m^2').get
material_cost_sri_increase_si = OpenStudio.convert(material_cost_sri_increase_ip, '1/ft^2', '1/m^2').get
running_cost_insulation = 0
running_cost_sri = 0
# prepare rule hash
rules = [] # climate zone, roof type, thermal transmittance (Btu/h·ft2·°F), SRI
# IEAD
rules << ['1', 'IEAD', 0.048, 78.0] # R-20.0 ci.
rules << ['2', 'IEAD', 0.039, 78.0] # R-25.0 ci.
rules << ['3', 'IEAD', 0.039, 78.0] # R-25.0 ci.
rules << ['4', 'IEAD', 0.032, 0] # R-30.0 ci., SRI Comply with Standard 90.1
rules << ['5', 'IEAD', 0.032, 0] # R-30.0 ci., SRI Comply with Standard 90.1
rules << ['6', 'IEAD', 0.032, 0] # R-30.0 ci., SRI Comply with Standard 90.1
rules << ['7', 'IEAD', 0.028, 0] # R-35.0 ci., SRI Comply with Standard 90.1
rules << ['8', 'IEAD', 0.028, 0] # R-35.0 ci., SRI Comply with Standard 90.1
# Attic
rules << ['1', 'Attic', 0.027, 78.0] # R-38.0
rules << ['2', 'Attic', 0.027, 78.0] # R-38.0
rules << ['3', 'Attic', 0.027, 78.0] # R-38.0
rules << ['4', 'Attic', 0.021, 0] # R-49.0, SRI Comply with Standard 90.1
rules << ['5', 'Attic', 0.021, 0] # R-49.0, SRI Comply with Standard 90.1
rules << ['6', 'Attic', 0.021, 0] # R-49.0, SRI Comply with Standard 90.1
rules << ['7', 'Attic', 0.017, 0] # R-60.0, SRI Comply with Standard 90.1
rules << ['8', 'Attic', 0.017, 0] # R-60.0, SRI Comply with Standard 90.1
# Metal
rules << ['1', 'Metal', 0.041, 78.0] # R-19.0 + R-10.0 FC (confirm same change as K12 using 0.041 vs. 0.057)
rules << ['2', 'Metal', 0.041, 78.0] # R-19.0 + R-10.0 FC (confirm same change as K12 using 0.041 vs. 0.057)
rules << ['3', 'Metal', 0.041, 78.0] # R-19.0 + R-10.0 FC (confirm same change as K12 using 0.041 vs. 0.057)
rules << ['4', 'Metal', 0.035, 0] # R-19.0 + R-11 Ls, SRI Comply with Standard 90.1
rules << ['5', 'Metal', 0.031, 0] # R-25.0 + R-11 Ls, SRI Comply with Standard 90.1
rules << ['6', 'Metal', 0.031, 0] # R-25.0 + R-11 Ls, SRI Comply with Standard 90.1
rules << ['7', 'Metal', 0.029, 0] # R-30.0 + R-11 Ls, SRI Comply with Standard 90.1
rules << ['8', 'Metal', 0.026, 0] # R-25.0 + R-11 + R-11 Ls, SRI Comply with Standard 90.1
# make rule hash for cleaner code
rulesHash = {}
rules.each do |rule|
rulesHash["#{rule[0]} #{rule[1]}"] = { 'conductivity_ip' => rule[2], 'sri' => rule[3] }
end
# get climate zone
climateZoneNumber = OsLib_AedgMeasures.getClimateZoneNumber(model, runner)
# climateZoneNumber = "4" # this is just in for quick testing of different climate zones
# add message for climate zones 4-8 about SRI (while the office AEDG doesn't mention this in table like the K-12 AEDG does, still seems like relevant message.)
if climateZoneNumber == false
return false
elsif climateZoneNumber.to_f > 3
runner.registerInfo("For Climate Zone #{climateZoneNumber} Solar Reflectance Index (SRI) should comply with Standard 90.1.")
end
# get starting r-value and SRI ranges
startingRvaluesExtRoof = []
startingRvaluesAtticInterior = []
startingSriExtRoof = []
# flag for roof surface type for tips
ieadFlag = false
metalFlag = false
atticFlag = false
# affected area counter
insulation_affected_area = 0
sri_affected_area = 0
# construction hashes (construction is key, value is array [thermal transmittance (Btu/h·ft2·°F), SRI,rule thermal transmittance (Btu/h·ft2·°F), rule SRI,classification string)
ieadConstructions = {}
metalConstructions = {}
atticConstructions = {} # will initially load all constructions used in model, and will delete later if passes test
# this contains constructions that should not have exterior roofs assigned
otherConstructions = []
# make array for spaces that have a surface with at least one exterior attic surface
atticSpaces = []
# loop through constructions
constructions = model.getConstructions
constructions.each do |construction|
# skip if not used
next if construction.getNetArea <= 0
# skip if not opaque
next if !construction.isOpaque
# get construction and standard
constructionStandard = construction.standardsInformation
# get roof type
intendedSurfaceType = constructionStandard.intendedSurfaceType
constructionType = constructionStandard.standardsConstructionType
# get conductivity
conductivity_si = construction.thermalConductance.get
r_value_ip = OpenStudio.convert(1 / conductivity_si, 'm^2*K/W', 'ft^2*h*R/Btu').get
# get SRI (only need of climate zones 1-3)
sri = OsLib_Constructions.getConstructionSRI(construction)
# flags for construction loop
ruleRvalueFlag = true
ruleSriFlag = true
# IEAD and Metal roofs should have intendedSurfaceType of ExteriorRoof
if intendedSurfaceType.to_s == 'ExteriorRoof'
if constructionType.to_s == 'IEAD'
# store starting values
startingRvaluesExtRoof << r_value_ip
startingSriExtRoof << sri
ieadFlag = true
# test construction against rules
ruleSet = rulesHash["#{climateZoneNumber} IEAD"]
if 1 / r_value_ip > ruleSet['conductivity_ip']
ruleRvalueFlag = false
end
if sri < ruleSet['sri']
ruleSriFlag = false
end
if !ruleRvalueFlag || !ruleSriFlag
ieadConstructions[construction] = { 'conductivity_ip' => 1 / r_value_ip, 'sri' => sri, 'transmittance_ip_rule' => ruleSet['conductivity_ip'], 'sri_rule' => ruleSet['sri'], 'classification' => 'ieadConstructions' }
end
elsif constructionType.to_s == 'Metal'
# store starting values
startingRvaluesExtRoof << r_value_ip
startingSriExtRoof << sri
metalFlag = true
# test construction against rules
ruleSet = rulesHash["#{climateZoneNumber} Metal"]
if 1 / r_value_ip > ruleSet['conductivity_ip']
ruleRvalueFlag = false
end
if sri < ruleSet['sri']
ruleSriFlag = false
end
if !ruleRvalueFlag || !ruleSriFlag
metalConstructions[construction] = { 'conductivity_ip' => 1 / r_value_ip, 'sri' => sri, 'transmittance_ip_rule' => ruleSet['conductivity_ip'], 'sri_rule' => ruleSet['sri'], 'classification' => 'metalConstructions' }
end
else
# create warning if a construction passing through here is used on a roofCeiling surface with a boundary condition of "Outdoors"
otherConstructions << construction
end
elsif (intendedSurfaceType.to_s == 'AtticRoof') || (intendedSurfaceType.to_s == 'AtticWall') || (intendedSurfaceType.to_s == 'AtticFloor')
# store starting values
atticFlag = true
atticConstructions[construction] = { 'conductivity_ip' => 1 / r_value_ip, 'sri' => sri } # will extend this hash later
else
# create warning if a construction passing through here is used on a roofCeiling surface with a boundary condition of "Outdoors"
otherConstructions << construction
end
end
# create warning if construction used on exterior roof doesn't have a surface type of "ExteriorRoof", or if constructions tagged to be used as roof, are used on other surface types
otherConstructionsWarned = []
atticSurfaces = [] # to test against attic spaces later on
surfaces = model.getSurfaces
surfaces.each do |surface|
if !surface.construction.empty?
construction = surface.construction.get
# populate attic spaces
if (surface.outsideBoundaryCondition == 'Outdoors') && atticConstructions.include?(construction)
if !surface.space.empty?
if !atticSpaces.include? surface.space.get
atticSpaces << surface.space.get
end
end
elsif atticConstructions.include? construction
atticSurfaces << surface
end
if (surface.outsideBoundaryCondition == 'Outdoors') && (surface.surfaceType == 'RoofCeiling')
if otherConstructions.include?(construction) && (!otherConstructionsWarned.include? construction)
runner.registerWarning("#{construction.name} is used on one or more exterior roof surfaces but has an intended surface type or construction type not recognized by this measure. As we can not infer the proper performance target, this construction will not be altered.")
otherConstructionsWarned << construction
end
else
if ieadConstructions.include?(construction) || metalConstructions.include?(construction)
runner.registerWarning("#{surface.name} uses #{construction.name} as a construction that this measure expects to be used for exterior roofs. This surface has a type of #{surface.surfaceType} and a a boundary condition of #{surface.outsideBoundaryCondition}. This may result in unexpected changes to your model.")
end
end
end
end
# hashes to hold classification of attic surfaces
atticSurfacesInterior = {} # this will include paris of matched surfaces
atticSurfacesExteriorExposed = {}
atticSurfacesExteriorExposedNonRoof = {}
atticSurfacesOtherAtticDemising = {}
# look for attic surfaces that are not in attic space or matched to them.
atticSpaceWarning = false
atticSurfaces.each do |surface|
if !surface.space.empty?
space = surface.space.get
if !atticSpaces.include? space
if surface.outsideBoundaryCondition == 'Surface'
# get space of matched surface and see if it is also an attic
next if surface.adjacentSurface.empty?
adjacentSurface = surface.adjacentSurface.get
next if adjacentSurface.space.empty?
adjacentSurfaceSpace = adjacentSurface.space.get
if !atticSpaces.include? adjacentSurfaceSpace
atticSpaceWarning = true
end
else
atticSpaceWarning = true
end
end
end
end
if atticSpaceWarning
runner.registerWarning("#{surface.name} uses #{construction.name} as a construction that this measure expects to be used for attics. This surface has a type of #{surface.surfaceType} and a a boundary condition of #{surface.outsideBoundaryCondition}. This may result in unexpected changes to your model.")
end
# flag for testing
interiorAtticSurfaceInSpace = false
# loop through attic spaces to classify surfaces with attic intended surface type
atticSpaces.each do |atticSpace|
atticSurfaces = atticSpace.surfaces
# array for surfaces that don't use an attic construction
surfacesWithNonAtticConstructions = []
# loop through attic surfaces
atticSurfaces.each do |atticSurface|
next if atticSurface.construction.empty?
construction = atticSurface.construction.get
if atticConstructions.include? construction
conductivity_ip = atticConstructions[construction]['conductivity_ip']
r_value_ip = 1 / conductivity_ip
sri = atticConstructions[construction]['sri']
else
surfacesWithNonAtticConstructions << atticSurface.name
next
end
# warn if any exterior exposed roof surfaces are not attic.
if atticSurface.outsideBoundaryCondition == 'Outdoors'
# only want to change SRI if it is a roof
if atticSurface.surfaceType == 'RoofCeiling'
# store starting value for SRI
startingSriExtRoof << sri
atticSurfacesExteriorExposed[atticSurface] = construction
else
atticSurfacesExteriorExposedNonRoof[atticSurface] = construction
end
elsif atticSurface.outsideBoundaryCondition == 'Surface'
# get space of matched surface and see if it is also an attic
next if atticSurface.adjacentSurface.empty?
adjacentSurface = atticSurface.adjacentSurface.get
next if adjacentSurface.space.empty?
adjacentSurfaceSpace = adjacentSurface.space.get
if atticSpaces.include?(adjacentSurfaceSpace) && atticSpaces.include?(atticSpace)
atticSurfacesOtherAtticDemising[atticSurface] = construction
else
# store starting values
startingRvaluesAtticInterior << r_value_ip
atticSurfacesInterior[atticSurface] = construction
interiorAtticSurfaceInSpace = true # this is to confirm that space has at least one interior surface flagged as an attic
end
else
runner.registerWarning("Can't infer use case for attic surface with an outside boundary condition of #{atticSurface.outsideBoundaryCondition}.")
end
end
# warning message for each space that has mix of attic and non attic constructions
runner.registerWarning("#{atticSpace.name} has surfaces with a mix of attic and non attic constructions which may produce unexpected results. The following surfaces use constructions not tagged as attic and will not be altered: #{surfacesWithNonAtticConstructions.sort.join(',')}.")
# confirm that all spaces have at least one or more surface of both exterior attic and interior attic
if !interiorAtticSurfaceInSpace
runner.registerWarning("#{atticSpace.name} has at least one exterior attic surface but does not have an interior attic surface. Please confirm that this space is intended to be an attic and update the constructions used.")
end
# see if attic is part of floor area and/or if it has people in it
if atticSpace.partofTotalFloorArea
runner.registerWarning("#{atticSpace.name} is part of the floor area. That is not typical for an attic.")
end
if !atticSpace.people.empty?
runner.registerWarning("#{atticSpace.name} has people. That is not typical for an attic.")
end
end
# hash to look for classification conflicts in attic constructions
atticConstructionLog = {}
# test attic constructions and identify conflicts
# conflict resolution order (insulation,sri,nothing-for demising)
atticSurfacesInterior.each do |surface, construction|
next if atticConstructionLog[construction] == 'atticSurfacesInterior'
conductivity_ip = atticConstructions[construction]['conductivity_ip']
# test construction against rules
ruleSet = rulesHash["#{climateZoneNumber} Attic"]
if conductivity_ip > ruleSet['conductivity_ip']
atticConstructions[construction] = { 'conductivity_ip' => conductivity_ip, 'sri' => 'NA', 'transmittance_ip_rule' => ruleSet['conductivity_ip'], 'sri_rule' => 'NA', 'classification' => 'atticSurfacesInterior' }
else
# delete const from main hash
atticConstructions.delete(construction)
end
atticConstructionLog[construction] = 'atticSurfacesInterior' # pass in construction object and the type of rule it was tested against
end
atticSurfacesExteriorExposed.each do |surface, construction|
next if atticConstructionLog[construction] == 'atticSurfacesExteriorExposed'
sri = atticConstructions[construction]['sri']
# warn user if construction used on attic interior surface
if atticConstructionLog[construction] == 'atticSurfacesInterior'
runner.registerWarning("#{surface.name} appears to be an exterior surface but uses a construction #{construction.name} that is also used on interior attic surfaces. The construction was classified and tested as an insulated interior attic construction. You may see unexpected results.")
next
end
# test construction against rules
ruleSet = rulesHash["#{climateZoneNumber} Attic"]
if sri < ruleSet['sri']
atticConstructions[construction] = { 'conductivity_ip' => 'NA', 'sri' => sri, 'transmittance_ip_rule' => 'NA', 'sri_rule' => ruleSet['sri'], 'classification' => 'atticSurfacesExteriorExposed' }
else
# delete const from main hash
atticConstructions.delete(construction)
end
atticConstructionLog[construction] = 'atticSurfacesExteriorExposed' # pass in construction object and the type of rule it was tested against
end
atticSurfacesOtherAtticDemising.each do |k, construction|
next if atticConstructionLog[construction] == 'atticSurfacesOtherAtticDemising'
sri = atticConstructions[construction]['sri']
# warn user if construction used on attic interior surface
if atticConstructionLog[construction] == 'atticSurfacesInterior'
runner.registerWarning("#{surface.name} appears to be an exterior surface but uses a construction #{construction.name} that is also used on interior attic surfaces. The construction was classified and tested as an insulated interior attic construction. You may see unexpected results.")
next
elsif atticConstructionLog[construction] == 'atticSurfacesExteriorExposed'
runner.registerWarning("#{surface.name} appears to be an surface between two attic spaces uses a construction #{construction.name} that is also used on exterior attic surfaces. The construction was classified and tested as an insulated interior attic construction. You may see unexpected results.")
next
end
# delete const from main hash.
atticConstructions.delete(construction)
# No rule test needed for demising.
atticConstructionLog[construction] = 'atticSurfacesOtherAtticDemising' # pass in construction object and the type of rule it was tested against
end
# delete constructions from hash that are non used on roof attic surfaces, but are exterior exposed
atticSurfacesExteriorExposedNonRoof.each do |surface, construction|
if atticSurfacesExteriorExposed.value? construction # make sure I'm checking for value not key
runner.registerWarning("#{surface.name} is a non-roof surface but uses a construction that the measure is treating as an exterior attic roof. Having this associated with a non-roof surface may increase affected area of SRI improvements.")
else
atticConstructions.delete(construction)
end
end
# alter constructions and add lcc
constructionsToChange = ieadConstructions.sort + metalConstructions.sort + atticConstructions.sort
constructionsToChange.each do |construction, hash|
# gather insulation inputs
if hash['transmittance_ip_rule'] != 'NA'
# gather target decrease in conductivity
conductivity_ip_starting = hash['conductivity_ip']
conductivity_si_starting = OpenStudio.convert(conductivity_ip_starting, 'Btu/ft^2*h*R', 'W/m^2*K').get
r_value_ip_starting = 1 / conductivity_ip_starting # ft^2*h*R/Btu
r_value_si_starting = 1 / conductivity_si_starting # m^2*K/W
conductivity_ip_target = hash['transmittance_ip_rule'].to_f
conductivity_si_target = OpenStudio.convert(conductivity_ip_target, 'Btu/ft^2*h*R', 'W/m^2*K').get
r_value_ip_target = 1 / conductivity_ip_target # ft^2*h*R/Btu
r_value_si_target = 1 / conductivity_si_target # m^2*K/W
# infer insulation material to get input for target thickness
minThermalResistance = OpenStudio.convert(1, 'ft^2*h*R/Btu', 'm^2*K/W').get
inferredInsulationLayer = OsLib_Constructions.inferInsulationLayer(construction, minThermalResistance)
rvalue_si_deficiency = r_value_si_target - r_value_si_starting
# add lcc for insulation
lcc_mat_insulation = OpenStudio::Model::LifeCycleCost.createLifeCycleCost("LCC_Mat_Insulation - #{construction.name}", construction, material_cost_insulation_increase_si, 'CostPerArea', 'Construction', expected_life, years_until_costs_start)
lcc_mat_insulation_value = lcc_mat_insulation.get.totalCost
running_cost_insulation += lcc_mat_insulation_value
# adjust existing material or add new one
if inferredInsulationLayer['insulationFound'] # if insulation layer was found
# gather inputs for method
target_material_rvalue_si = inferredInsulationLayer['construction_thermal_resistance'] + rvalue_si_deficiency
# run method to change insulation layer thickness in cloned material (material,starting_r_value_si,target_r_value_si, model)
new_material = OsLib_Constructions.setMaterialThermalResistance(inferredInsulationLayer['construction_layer'], target_material_rvalue_si)
# connect new material to original construction
construction.eraseLayer(inferredInsulationLayer['layer_index'])
construction.insertLayer(inferredInsulationLayer['layer_index'], new_material)
# get conductivity
final_conductivity_si = construction.thermalConductance.get
final_r_value_ip = OpenStudio.convert(1 / final_conductivity_si, 'm^2*K/W', 'ft^2*h*R/Btu').get
# report on edited material
runner.registerInfo("The R-value of #{construction.name} has been increased from #{OpenStudio.toNeatString(r_value_ip_starting, 2, true)} to #{OpenStudio.toNeatString(final_r_value_ip, 2, true)}(ft^2*h*R/Btu) at a cost of $#{OpenStudio.toNeatString(lcc_mat_insulation_value, 2, true)}. Increased performance was accomplished by adjusting thermal resistance of #{new_material.name}.")
else
# inputs to pass to method
conductivity = 0.045 # W/m*K
thickness = rvalue_si_deficiency * conductivity # meters
addNewLayerToConstruction_Inputs = {
'roughness' => 'MediumRough',
'thickness' => thickness, # meters,
'conductivity' => conductivity, # W/m*K
'density' => 265.0,
'specificHeat' => 836.8,
'thermalAbsorptance' => 0.9,
'solarAbsorptance' => 0.7,
'visibleAbsorptance' => 0.7
}
# create new material if can't infer insulation material (construction,thickness, conductivity, density, specificHeat, roughness,thermalAbsorptance, solarAbsorptance,visibleAbsorptance,model)
newMaterialLayer = OsLib_Constructions.addNewLayerToConstruction(construction, addNewLayerToConstruction_Inputs)
# get conductivity
final_conductivity_si = construction.thermalConductance.get
final_r_value_ip = OpenStudio.convert(1 / final_conductivity_si, 'm^2*K/W', 'ft^2*h*R/Btu').get
# report on edited material
runner.registerInfo("The R-value of #{construction.name} has been increased from #{OpenStudio.toNeatString(r_value_ip_starting, 2, true)} to #{OpenStudio.toNeatString(final_r_value_ip, 2, true)}(ft^2*h*R/Btu) at a cost of $#{OpenStudio.toNeatString(lcc_mat_insulation_value, 2, true)}. Increased performance was accomplished by adding a new material layer to the outside of #{construction.name}.")
end
# add to area counter
insulation_affected_area += construction.getNetArea # OpenStudio handles matched surfaces so they are not counted twice.
end
# gather sri inputs
if (hash['sri_rule'] == 78.0) && (hash['sri_rule'] > hash['sri'])
# hard assign material properies that will result in an SRI of 78
setConstructionSurfaceProperties_Inputs = {
'thermalAbsorptance' => 0.86,
'solarAbsorptance' => 1 - 0.65
}
# alter surface properties (construction,roughness,thermalAbsorptance, solarAbsorptance,visibleAbsorptance)
surfaceProperties = OsLib_Constructions.setConstructionSurfaceProperties(construction, setConstructionSurfaceProperties_Inputs)
sri = OsLib_Constructions.getConstructionSRI(construction)
# add lcc for SRI
lcc_mat_sri = OpenStudio::Model::LifeCycleCost.createLifeCycleCost("LCC_Mat_SRI - #{construction.name}", construction, material_cost_sri_increase_si, 'CostPerArea', 'Construction', expected_life, years_until_costs_start)
lcc_mat_sri_value = lcc_mat_sri.get.totalCost
running_cost_sri += lcc_mat_sri_value
# add to area counter
sri_affected_area += construction.getNetArea
# report performance and cost change for material, or area
runner.registerInfo("The Solar Reflectance Index (SRI) of #{construction.name} has been increased from #{OpenStudio.toNeatString(hash['sri'], 0, true)} to #{OpenStudio.toNeatString(sri, 0, true)} for a cost of $#{OpenStudio.toNeatString(lcc_mat_sri_value, 0, true)}. Affected area is #{OpenStudio.toNeatString(OpenStudio.convert(construction.getNetArea, 'm^2', 'ft^2').get, 0, true)} (ft^2)")
end
end
# populate AEDG tip keys
aedgTips = []
if ieadFlag
aedgTips.push('EN01', 'EN02', 'EN17', 'EN19', 'EN21', 'EN22')
end
if atticFlag
aedgTips.push('EN01', 'EN03', 'EN17', 'EN19', 'EN20', 'EN21')
end
if metalFlag
aedgTips.push('EN01', 'EN04', 'EN17', 'EN19', 'EN21')
end
# create not applicable of no constructions were tagged to change
# if someone had a model with only attic floors and no attic ceilings current logic would flag as not applicable, but a warning would be issued alerting them of the issue (attic surface being used outside of attic space)
if aedgTips.empty?
runner.registerAsNotApplicable('No surfaces use constructions tagged as a roof type recognized by this measure. No roofs were altered.')
return true
end
# populate how to tip messages
aedgTipsLong = OsLib_AedgMeasures.getLongHowToTips('SmMdOff', aedgTips.uniq.sort, runner)
if !aedgTipsLong
return false # this should only happen if measure writer passes bad values to getLongHowToTips
end
# reporting initial condition of model
startingRvalue = startingRvaluesExtRoof + startingRvaluesAtticInterior # adding non attic and attic values together
runner.registerInitialCondition("Starting R-values for constructions intended for insulated roof surfaces range from #{OpenStudio.toNeatString(startingRvalue.min, 2, true)} to #{OpenStudio.toNeatString(startingRvalue.max, 2, true)}(ft^2*h*R/Btu). Starting Solar Reflectance Index (SRI) for constructions intended for exterior roof surfaces range from #{OpenStudio.toNeatString(startingSriExtRoof.min, 0, true)} to #{OpenStudio.toNeatString(startingSriExtRoof.max, 0, true)}.")
# reporting final condition of model
insulation_affected_area_ip = OpenStudio.convert(insulation_affected_area, 'm^2', 'ft^2').get
sri_affected_area_ip = OpenStudio.convert(sri_affected_area, 'm^2', 'ft^2').get
runner.registerFinalCondition("#{OpenStudio.toNeatString(insulation_affected_area_ip, 0, true)}(ft^2) of constructions intended for roof surfaces had insulation enhanced at a cost of $#{OpenStudio.toNeatString(running_cost_insulation, 0, true)}. #{OpenStudio.toNeatString(sri_affected_area_ip, 0, true)}(ft^2) of constructions intended for roof surfaces had the Solar Reflectance Index (SRI) enhanced at a cost of $#{OpenStudio.toNeatString(running_cost_sri, 0, true)}. #{aedgTipsLong}")
return true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def measure; end",
"def measure=(_arg0); end",
"def measure\n\t\t1\n\tend",
"def measure(*args, &b)\n end",
"def communicate_measure_result(_ = nil, _ = nil); end",
"def communicate_measure_result(_ = nil, _ = nil); end",
"def called\n self.measurement.called\n end",
"def measure\n ... | [
"0.79848564",
"0.7639647",
"0.76355976",
"0.7170129",
"0.66926914",
"0.66926914",
"0.66718984",
"0.66311747",
"0.6599127",
"0.65870225",
"0.65324444",
"0.6481582",
"0.6405596",
"0.64028287",
"0.6333309",
"0.6283632",
"0.6283632",
"0.6283632",
"0.6281165",
"0.6269874",
"0.6242... | 0.0 | -1 |
main parser invoked with command `sepp scan` | def scanfile(source, options)
fn = File.basename(source,".*")
if options[:backup]
fback = File.join( File.dirname(source), "#{fn}_bak.md" )
`cp #{source} #{fback}`
end
# read and mark up {bibliography} for later use
File.open(source) { |mdf| @md = mdf.read.gsub('{bibliography}','[--bib--]') }
# get all citation ids (as matchData, therefore enumerate and not just .scan )
cids = @md.to_enum(:scan, /\{.+?\}/).map { Regexp.last_match }
# get all inline references from Sente
inl = sente6('create bibliography elements current library from tags "' + cids.join('||').gsub(/[\{\}]/,'').gsub('\\', '\&\&') + '" for intext').split('||')
bib = sente6('create bibliography elements current library from tags "' + cids.join('||').gsub(/[\{\}]/,'').gsub('\\', '\&\&') + '" for bibliography')
# inverse arrays
cids = cids.reverse
inl = inl.reverse.drop(1)
log = []
err = []
if cids.count == inl.count
cids.to_enum.with_index.each { |cid, i|
# log this to allow for unscanning later
# original citation id, formated inline citation, pre-match string (if formated in not unique)
log << cid.to_s + '|' + inl[i] + '|' + cid.pre_match.split(//).last($handle).join
# through error if no match found
err << cid.to_s if inl[i] == '()'
# replace citation id with formated string, using the offsets in cids's matchData
o = cid.offset(0)
@md[(o[0])..(o[1] - 1)] = inl[i]
}
end
if err.count > 0
# notity
@message = 'No match found for: ' + err.join(", ")
if options[:notify]
TerminalNotifier.notify(@message, :title => "No bibliography produced...", :open => "http://www.something.com")
end
puts @message
else
# Write output file (with added bibliography)
File.open(source, 'w') {|f| f << @md.gsub('[--bib--]', bib.gsub('.<i> ','. *').gsub("<br>", "\n\n").gsub('<i>','*').gsub('</i>','*'))}
# Write log file
`mkdir -p "$HOME/Library/Application Support/sepp"`
File.open(ENV["HOME"] + '/Library/Application Support/sepp/' + fn + '.log', 'w') {|f| f << log.join("\n")}
#notify
@message = 'Inline: ' + inl.count.to_s + '; References: ' + bib.gsub("<br>", "\n").lines.count.to_s + '.'
if options[:notify]
TerminalNotifier.notify(@message, :title => "Successfully produces bibliography", :open => "http://www.something.com")
end
puts @message
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse(args)\n #add the possible arguments to the argument parser\n\topt_parser = OptionParser.new do |opts|\n\t\topts.banner = \"Usage: Scanner.rb [options] file [files...]\"\n\t\topts.on(\"-s [FILE]\", \"--save [FILE]\", \"Saves to file [FILE]\") do |file|\n\t\t\t$save2File = true\n\t\t\t$file = File.new(Pat... | [
"0.66263777",
"0.6449215",
"0.6449215",
"0.6449215",
"0.6449215",
"0.6363091",
"0.63622653",
"0.63622653",
"0.63622653",
"0.63622653",
"0.63011444",
"0.62813514",
"0.6229778",
"0.6229778",
"0.61380994",
"0.6133653",
"0.61229753",
"0.60990256",
"0.6063994",
"0.60474896",
"0.60... | 0.0 | -1 |
Ensure deleted users cannot sign in def active_for_authentication? super && !deleted_at end Used for user authentication | def ensure_authentication_token (remember)
token = AuthToken.create!(token: generate_authentication_token,
remember: remember,
user_id: self.id)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def active_for_authentication?\n super && !deleted_at\n end",
"def active_for_authentication?\n super && !deleted_at\n end",
"def active_for_authentication?\n super && !deleted_at\n end",
"def active_for_authentication?\n super && !deleted_at\n end",
"def active_for_authentication?\n ... | [
"0.89555746",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.894714",
"0.8938769",
"0.8898861",
"0.8873535",
"0.... | 0.0 | -1 |
Difficulty: medium/hard Write a method that takes an array of consecutive (increasing) letters as input and that returns the missing letter in the array. You will always get an valid array. And it will be always exactly one letter be missing. The length of the array will always be at least 2. The array will always contain letters in only one case. Example: missing_letter(['a','b','c','d','f']) > 'e' missing_letter(['O','Q','R','S']) > 'P' Included test uses rspec 22 May 2019 | def missing_letter2(word)
alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
for letter in alphabet.split("")
if letter == word[0]
start = alphabet.index(letter)
break
end
end
for letter in word
if letter != alphabet[start]
return alphabet[start]
else start +=1
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_missing_letter(arr)\n alphabets = (arr[0]..arr[-1]).to_a # [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"]\n \n (alphabets - arr)[0] # \"e\"\nend",
"def determine_missing_letter(arr)\n alpha = ('A'..'z').to_a\n current_index = alpha.index(arr.first)\n arr.each_with_index do |char, index|\n next if i... | [
"0.84147185",
"0.826034",
"0.8207233",
"0.8195958",
"0.8122356",
"0.81194705",
"0.80738145",
"0.8004391",
"0.79845405",
"0.7968818",
"0.7930456",
"0.7927365",
"0.7817498",
"0.78102314",
"0.77927554",
"0.7764586",
"0.7754829",
"0.7641039",
"0.7620936",
"0.7615697",
"0.75935477... | 0.7392816 | 29 |
22 May 2019 learnt .each_with_index, .map.with_index (have nuanced differences) also .next and .succ (mean the same) | def missing_letter(word)
word.each_with_index do |letter, i|
return letter.next if word[i + 1] != letter.next
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def map_with_index &block\n index = 0\n map do |element|\n result = yield element, index\n index += 1\n result\n end\n end",
"def map_with_index(array, &block)\n result = []\n index = 0\n array.each do |element|\n result << block.call(element, index)\n index += 1\n end\n resul... | [
"0.7425129",
"0.68470335",
"0.67555577",
"0.671061",
"0.65570855",
"0.64828056",
"0.64720297",
"0.64579433",
"0.6445572",
"0.64334136",
"0.64287263",
"0.6378571",
"0.6369443",
"0.6353472",
"0.63479966",
"0.6318665",
"0.6314452",
"0.63143396",
"0.6307565",
"0.6300601",
"0.6300... | 0.0 | -1 |
GET /reqcargos GET /reqcargos.json | def index
@reqcargos = Reqcargo.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def list_tenants_for_circles(args = {}) \n get(\"/tenants.json/circles\", args)\nend",
"def list_tenants_for_circle(args = {}) \n get(\"/tenantcircles.json/tenants\", args)\nend",
"def request(args = {})\n response = @client.get(args[:url],\n argument_hash(args[:args] || {},\n ... | [
"0.6043053",
"0.596949",
"0.58654505",
"0.5772901",
"0.5599491",
"0.5525767",
"0.54969394",
"0.5478865",
"0.54684436",
"0.5465827",
"0.5415466",
"0.5407471",
"0.54027385",
"0.5386744",
"0.5374827",
"0.5365918",
"0.53389347",
"0.53317386",
"0.53213364",
"0.53172064",
"0.531038... | 0.65913725 | 0 |
GET /reqcargos/1 GET /reqcargos/1.json | def show
add_breadcrumb "Detalles", @reqcargos
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @reqcargos = Reqcargo.all\n end",
"def request(args = {})\n response = @client.get(args[:url],\n argument_hash(args[:args] || {},\n args[:symbols] || []))\n JSON.parse(response.body)\n end",
"def list_tenants_for... | [
"0.6250531",
"0.5993591",
"0.59115326",
"0.5737192",
"0.56888354",
"0.56508946",
"0.56508946",
"0.56002825",
"0.55560195",
"0.5546309",
"0.5544354",
"0.55347097",
"0.5504108",
"0.5493895",
"0.5487917",
"0.5486926",
"0.5486926",
"0.54837763",
"0.5443829",
"0.5427351",
"0.54142... | 0.0 | -1 |
POST /reqcargos POST /reqcargos.json | def create
@reqcargo = Reqcargo.new(reqcargo_params)
respond_to do |format|
if @reqcargo.save
format.html { redirect_to @reqcargo, notice: 'Reqcargo was successfully created.' }
format.json { render :show, status: :created, location: @reqcargo }
else
format.html { render :new }
format.json { render json: @reqcargo.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reqcargo_params\n params.require(:reqcargo).permit(:cargo_id, :nomrequis, :lactivo)\n end",
"def add_tenant_circle(args = {}) \n post(\"/tenantcircles.json/\", args)\nend",
"def do_coaps_posted_03\n # get the Base64 of the incoming signed request\n body = IO.read(\"spec/files/vr_00-D0-E5-... | [
"0.5948494",
"0.58488387",
"0.5842652",
"0.5673049",
"0.5657613",
"0.5448429",
"0.544024",
"0.5406381",
"0.53049535",
"0.5302718",
"0.52997845",
"0.52974343",
"0.5248637",
"0.52108425",
"0.5198508",
"0.5189851",
"0.51875246",
"0.51829207",
"0.51804996",
"0.51787615",
"0.51628... | 0.63217616 | 0 |
PATCH/PUT /reqcargos/1 PATCH/PUT /reqcargos/1.json | def update
respond_to do |format|
if @reqcargo.update(reqcargo_params)
format.html { redirect_to @reqcargo, notice: 'Reqcargo was successfully updated.' }
format.json { render :show, status: :ok, location: @reqcargo }
else
format.html { render :edit }
format.json { render json: @reqcargo.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def patch!\n request! :patch\n end",
"def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end",
"def patch(path, data)\n request 'PATCH', path, body: data.to_json\n end",
"def patch\n headers = {\"If-Match\" => @version}\n response = @context.request :pat... | [
"0.66986567",
"0.6543164",
"0.62974703",
"0.629693",
"0.6295578",
"0.62917954",
"0.6291602",
"0.6291602",
"0.6220814",
"0.61976075",
"0.6148801",
"0.614587",
"0.614587",
"0.614587",
"0.614587",
"0.60867625",
"0.6045554",
"0.60430604",
"0.60430604",
"0.6033557",
"0.6029839",
... | 0.64072025 | 2 |
DELETE /reqcargos/1 DELETE /reqcargos/1.json | def destroy
@reqcargo.destroy
respond_to do |format|
format.html { redirect_to reqcargos_url, notice: 'Reqcargo was successfully destroyed.' }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def delete(path)\n RestClient.delete request_base+path\n end",
"def delete(*args)\n request(:delete, *args)\n end",
"def delete\n request(:delete)\n end",
"def delete path\n ... | [
"0.72877944",
"0.70265955",
"0.6978186",
"0.695261",
"0.692509",
"0.6921337",
"0.6902557",
"0.68772715",
"0.68576765",
"0.6856065",
"0.68432945",
"0.68214524",
"0.67697966",
"0.676899",
"0.67601794",
"0.67601794",
"0.6754053",
"0.6733256",
"0.6717482",
"0.6716133",
"0.6712523... | 0.71188056 | 1 |
Use callbacks to share common setup or constraints between actions. | def set_reqcargo
@reqcargo = Reqcargo.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_... | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576"... | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def reqcargo_params
params.require(:reqcargo).permit(:cargo_id, :nomrequis, :lactivo)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n... | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",... | 0.0 | -1 |
GET /profiles GET /profiles.json | def index
@profiles = Profile.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n authorize Profile\n @profiles = ProfilePolicy::Scope.new(current_user, @user.profiles).resolve\n render json: @profiles\n end",
"def get_default_profile \n get(\"/profiles.json/default\")\nend",
"def profiles \n personid = params[:id]\n @response = JSON.parse(current_user.access... | [
"0.7720244",
"0.7511753",
"0.7511205",
"0.7474065",
"0.747391",
"0.7429771",
"0.7368022",
"0.7368022",
"0.7277252",
"0.72754085",
"0.72646606",
"0.7194632",
"0.71890444",
"0.7109752",
"0.7068873",
"0.7061951",
"0.7061951",
"0.7061951",
"0.7036681",
"0.70242476",
"0.699903",
... | 0.6769825 | 54 |
GET /profiles/1 GET /profiles/1.json | def show
@country = Carmen::Country.coded(@profile.country_code)
@subregions = @country.subregions
@state = @subregions.coded(@profile.state_code)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_default_profile \n get(\"/profiles.json/default\")\nend",
"def my_profiles\n @user = User.find(params[:user_id])\n @profiles = @user.profiles\n end",
"def show\n profile = Profile.find(params[:id])\n render status: 200, json: profile\n end",
"def index\n authorize Profile\n @profi... | [
"0.7775649",
"0.74931335",
"0.74876684",
"0.736598",
"0.7305961",
"0.7302657",
"0.7301793",
"0.72450936",
"0.72319347",
"0.72319347",
"0.72319347",
"0.72181976",
"0.72181976",
"0.71651715",
"0.71410364",
"0.7114219",
"0.70800215",
"0.7046293",
"0.70175827",
"0.69905627",
"0.6... | 0.0 | -1 |
POST /profiles POST /profiles.json | def create
@profile = current_user.build_profile(pro_params)
respond_to do |format|
if @profile.save
format.html { redirect_to @profile, notice: 'Profile was successfully created.' }
format.json { render :show, status: :created, location: @profile }
else
format.html { redirect_to new_profile_path, alert: 'Please fill all fields' }
format.json { render json: @profile.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @profile = current_user.profiles.build(profile_params)\n\n respond_to do |format|\n if @profile.save\n format.html { redirect_to @profile, notice: 'Profile was successfully created.' }\n format.json { render action: 'show', status: :created, location: @profile }\n else\n ... | [
"0.73701555",
"0.7367171",
"0.72288865",
"0.71786684",
"0.7172963",
"0.7170674",
"0.71145105",
"0.7096289",
"0.707034",
"0.7002038",
"0.7002038",
"0.7002038",
"0.7002038",
"0.7002038",
"0.7002038",
"0.7002038",
"0.6997514",
"0.69700205",
"0.69617796",
"0.6937631",
"0.6937631"... | 0.6739768 | 46 |
PATCH/PUT /profiles/1 PATCH/PUT /profiles/1.json | def update
respond_to do |format|
if @profile.update(pro_params)
format.html { redirect_to @profile, notice: 'Profile was successfully updated.' }
format.json { render :show, status: :ok, location: @profile }
else
format.html { render :edit }
format.json { render json: @profile.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n @profiles = current_user.profiles.find(params[:id])\n\n respond_to do |format|\n if @profiles.update(profile_params)\n format.html { redirect_to profiles_path, notice: 'Profile was successfully updated.' }\n format.json { render :show, status: :ok, location: @profiles }\n ... | [
"0.7385554",
"0.7121749",
"0.7113102",
"0.7096679",
"0.7091465",
"0.7091465",
"0.7091465",
"0.7076842",
"0.707045",
"0.7063198",
"0.70600575",
"0.7022012",
"0.70120174",
"0.699651",
"0.6985236",
"0.6985236",
"0.6985236",
"0.6985236",
"0.6979028",
"0.697453",
"0.6968044",
"0... | 0.680932 | 55 |
DELETE /profiles/1 DELETE /profiles/1.json | def destroy
@profile.destroy
respond_to do |format|
format.html { redirect_to profiles_url, notice: 'Profile was successfully destroyed.' }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @profile = Profile.find(params[:id])\n @profile.destroy\n\n respond_to do |format|\n format.html { redirect_to profiles_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @profile = Profile.find(params[:id])\n @profile.destroy\n\n respond_to do |format|\n ... | [
"0.7739456",
"0.7739456",
"0.7739456",
"0.7707937",
"0.77065504",
"0.77065504",
"0.77065504",
"0.77065504",
"0.77065504",
"0.77065504",
"0.77065504",
"0.77065504",
"0.7706363",
"0.7704212",
"0.769637",
"0.7687797",
"0.7687588",
"0.7687588",
"0.76273435",
"0.7588475",
"0.75627... | 0.74744916 | 53 |
Use callbacks to share common setup or constraints between actions. | def set_profile
@profile = Profile.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_... | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576"... | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def profile_params
params.fetch(:profile, {})
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n... | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",... | 0.0 | -1 |
Initializes a new AttributeGroupData instance with the specified name and an empty set of rows. +name+:: The name of the attribute group whose data is being represented. | def initialize(name)
@name = name
@rows = Array.new
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(name)\n @name = name\n @groups = Levels::KeyValues.new\n end",
"def initialize(name)\n @name = name.to_s\n @attributes = {}\n @criteria = []\n end",
"def initialize(name)\n raise BadDataException, \"No name present\" if name.empty?\n @name = name\n e... | [
"0.6130959",
"0.59542716",
"0.5819765",
"0.58026236",
"0.56381595",
"0.5405932",
"0.5394563",
"0.5389596",
"0.53759176",
"0.53447115",
"0.52595925",
"0.5246296",
"0.52027625",
"0.50712967",
"0.5062148",
"0.50427884",
"0.5036842",
"0.5033747",
"0.5016151",
"0.50087273",
"0.495... | 0.6525518 | 0 |
Simply passes on Enumerable calls to the instance's rows. | def each(&block)
@rows.each(&block)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def map!\n @rows = @rows.map { |row| yield row, self }\n end",
"def each_row\n @rows.each { |row| yield(row) }\n self\n end",
"def each_row\n end",
"def each(&block)\n rows.each(&block)\n end",
"def each(&block)\n @rows.each(&block)\n end",
"def each(&block)\n ... | [
"0.7541582",
"0.7363567",
"0.725783",
"0.7149952",
"0.71447235",
"0.71447235",
"0.7054897",
"0.7043491",
"0.7016991",
"0.70161694",
"0.6978905",
"0.685917",
"0.6853187",
"0.6813307",
"0.68119425",
"0.674367",
"0.6717421",
"0.67022157",
"0.66690457",
"0.6616909",
"0.6603494",
... | 0.72366005 | 4 |
Returns a tabular string representation of the attribute group's data. If +column_names+ is non+nil+, only returns data for the columns specified; otherwise returns data for all columns. +column_names+:: An array of columsn to display data for. +format+:: The format to output data in. Must be either :text or :html. If it is not one of these, outputs as text. | def to_table(column_names=nil, format="text")
require 'tabular'
stringio = StringIO.new;
$stderr.puts "Request for #{@name} data returned #{@rows.length} #{@rows.length != 1 ? "rows" : "row"}."
if rows.length > 0
# If no column names were specified, use all the columns in the first row (in alphabetical order)
column_names = @rows[0].keys.sort if !column_names
# Create a table with a Column for each column name
columns = column_names.collect { |column_name| Tabular::Column.new(column_name) }
table = Tabular::Table.new(*columns)
# Create our array of data rows
data = @rows.collect { |row| column_names.collect { |column_name| row[column_name] } }
# Set the table's data and write out the tabular representation to stringio
table.data = data
stringio.puts(format == :html ? table.to_html("Data for #{self.name}") : table.to_s)
end
stringio.string
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def serialize_attributes(format, *columns)\n if format.is_a?(Symbol)\n unless format = REGISTERED_FORMATS[format]\n raise(Error, \"Unsupported serialization format: #{format} (valid formats: #{REGISTERED_FORMATS.keys.map(&:inspect).join})\")\n end\n end\n ... | [
"0.53699476",
"0.5357202",
"0.5232327",
"0.51930964",
"0.5192452",
"0.51601917",
"0.51322305",
"0.5056988",
"0.5056476",
"0.50357574",
"0.4994563",
"0.49546322",
"0.49168053",
"0.49091384",
"0.49007094",
"0.4882268",
"0.4877234",
"0.4877234",
"0.4839324",
"0.47806492",
"0.477... | 0.6304722 | 0 |
Gets the specified attribute group's data from the agent/service_point specified and returns a new AttributeGroupData object which contains that data. +agent_address+:: The address of the machine on which the agent resides. +service_point+:: The service point of the agent from which to collect the data. +username+:: The username to use when connecting to the specified service point. +password+:: The password to use when connecting to the specified service point. +attribute_group+:: The TEMS name of the attribute group whose data should be collected. +subnodes+:: An array of subnode managed system names for which to collect data. +port+:: The port via which to communicate with the agent. Defaults to 1920. | def get_attribute_group_data(agent_address, service_point, username, password, attribute_group, subnodes, port=1920)
require 'net/http'
uri = URI("http://#{agent_address}:#{port}///#{service_point}/#{service_point}")
body = "<REPORT><SQLTABLE NULLATTROUTPUT=\"Y\"><TABLENAME>#{attribute_group}</TABLENAME><SUBNODES>#{subnodes.join(",")}</SUBNODES></SQLTABLE></REPORT>"
# warn "Sending HTTP POST to #{uri} with body #{body}"
request = Net::HTTP::Post.new(uri.path)
request.basic_auth(username, password)
request.body = body
response = Net::HTTP.start(uri.host, uri.port) { |http| http.request(request) }
if not response.is_a?(Net::HTTPOK)
$stderr.puts "Response #{response.code}: #{response.body}"
return nil
end
# warn "Received #{response.body}"
return AttributeGroupData.new_with_xml(response.body)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse_args(args)\n require 'ostruct'\n require 'optparse'\n\n parser = OptionParser.new\n options = OpenStruct.new\n\n options.output = STDOUT\n options.format = :text\n options.columns = nil\n options.subnodes = [ ]\n\n parser.banner = \"Usage: #{$0} [options] agent_address service_point username pas... | [
"0.48148233",
"0.4416741",
"0.42515406",
"0.42063287",
"0.41547632",
"0.4098359",
"0.40892184",
"0.4080662",
"0.40591976",
"0.40394482",
"0.39851138",
"0.39571363",
"0.39442456",
"0.3898882",
"0.3848071",
"0.3843621",
"0.3828492",
"0.38222227",
"0.38091433",
"0.3783533",
"0.3... | 0.7813658 | 0 |
Parses the specified arguments (presumably ARGV), returning the agent address, service point, username, password, and attribute group that were specified on the command line, in addition a list of columns and subnodes in an OpenStruct object. +args+:: The arguments to parse. These presumably came from the commandline. | def parse_args(args)
require 'ostruct'
require 'optparse'
parser = OptionParser.new
options = OpenStruct.new
options.output = STDOUT
options.format = :text
options.columns = nil
options.subnodes = [ ]
parser.banner = "Usage: #{$0} [options] agent_address service_point username password attribute_group"
parser.on("-h", "--help", "Show this message.") { $stderr.puts parser; exit }
parser.on("-f", "--format [ 'text' | 'html' ]", [:text, :html],
"Set the output format to either plain text or HTML.", "[default: text]") do |f|
options.format = f
end
parser.on("-c", "--columns c1,...,cN", Array, "Output columns c1,...,cN.", "[default: all columns]") { |c| options.columns = c }
parser.on("-s", "--subnodes s1,...,sN", Array, "Collects data for subnodes s1,...,sN.", "[default: no subnodes]") { |s| options.subnodes = s }
rest = parser.parse(args)
# Ensure we have all the required args
if rest.length != 5
$stderr.puts parser
exit
end
return *rest, options
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse_args()\n opts = GetoptLong.new(\n ['--host', GetoptLong::OPTIONAL_ARGUMENT],\n ['--port', GetoptLong::OPTIONAL_ARGUMENT],\n ['--columns', GetoptLong::OPTIONAL_ARGUMENT],\n ['--index', GetoptLong::REQUIRED_ARGUMENT],\n ['--type', GetoptLong::REQUIRED_ARGUMENT]\n )\n\n opts.each do |opt, ... | [
"0.6412463",
"0.6116028",
"0.60383564",
"0.5975493",
"0.59547824",
"0.58309287",
"0.571015",
"0.56809366",
"0.56716585",
"0.5657897",
"0.5644503",
"0.56317717",
"0.5618292",
"0.5602395",
"0.5582551",
"0.55758756",
"0.5542489",
"0.55292755",
"0.552834",
"0.55140316",
"0.551362... | 0.86728346 | 0 |
GET /produtividades/1 GET /produtividades/1.xml | def show
@produtividade = Produtividade.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @produtividade }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n @produto = Produto.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @produto }\n end\n end",
"def show\n @produto = Produto.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.x... | [
"0.7011869",
"0.7011869",
"0.7011869",
"0.6863754",
"0.67091066",
"0.665916",
"0.66269976",
"0.6607207",
"0.6559279",
"0.65545934",
"0.65191364",
"0.65031767",
"0.64880395",
"0.6480338",
"0.64791906",
"0.64757663",
"0.6466996",
"0.6466677",
"0.6461223",
"0.6456405",
"0.645474... | 0.6972938 | 3 |
GET /produtividades/new GET /produtividades/new.xml | def new
@produtividade = Produtividade.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @produtividade }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @produto = Produto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @produto }\n end\n end",
"def new\n @produto = Produto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @produto }\... | [
"0.77489555",
"0.77489555",
"0.77235126",
"0.7571242",
"0.75583035",
"0.74109846",
"0.7407102",
"0.7346539",
"0.73421234",
"0.7335259",
"0.73333687",
"0.73301154",
"0.7316158",
"0.73159915",
"0.73159915",
"0.73127204",
"0.73124444",
"0.7309334",
"0.73070246",
"0.7294046",
"0.... | 0.7659268 | 3 |
POST /produtividades POST /produtividades.xml | def create
@produtividade = Produtividade.new(params[:produtividade])
respond_to do |format|
if @produtividade.save
format.html { redirect_to(@produtividade, :notice => 'Produtividade was successfully created.') }
format.xml { render :xml => @produtividade, :status => :created, :location => @produtividade }
else
format.html { render :action => "new" }
format.xml { render :xml => @produtividade.errors, :status => :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @produto = Produto.new(params[:produto])\n\n respond_to do |format|\n if @produto.save\n format.html { redirect_to(@produto, :notice => 'Produto was successfully created.') }\n format.xml { render :xml => @produto, :status => :created, :location => @produto }\n else\n ... | [
"0.63159233",
"0.63159233",
"0.6264138",
"0.6236292",
"0.614043",
"0.61105925",
"0.60872656",
"0.6045021",
"0.6014807",
"0.59685445",
"0.5956502",
"0.5954889",
"0.59487337",
"0.59463733",
"0.5938046",
"0.59338456",
"0.5927313",
"0.5926482",
"0.5918086",
"0.5918086",
"0.591808... | 0.65412855 | 0 |
PUT /produtividades/1 PUT /produtividades/1.xml | def update
@produtividade = Produtividade.find(params[:id])
respond_to do |format|
if @produtividade.update_attributes(params[:produtividade])
format.html { redirect_to(@produtividade, :notice => 'Produtividade was successfully updated.') }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @produtividade.errors, :status => :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n @produto.update(produto_params)\n respond_with @produto\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post 'update', opts\n end",
"def update\n @produto = Produto.find(params[:id])\n\n respond_to do |format|\n if... | [
"0.67078876",
"0.664587",
"0.6568118",
"0.6568118",
"0.64782166",
"0.6350164",
"0.63406724",
"0.6331036",
"0.6289379",
"0.62521005",
"0.6245974",
"0.6214503",
"0.6185831",
"0.615584",
"0.61452305",
"0.6135821",
"0.6135462",
"0.61248356",
"0.61248356",
"0.61248356",
"0.6102463... | 0.64875823 | 4 |
DELETE /produtividades/1 DELETE /produtividades/1.xml | def destroy
@produtividade = Produtividade.find(params[:id])
@produtividade.destroy
respond_to do |format|
format.html { redirect_to(produtividades_url) }
format.xml { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n #@produto = Produto.find(params[:id])\n @produto.destroy\n\n respond_to do |format|\n format.html { redirect_to(produtos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @produto = Produto.find(params[:id])\n @produto.destroy\n\n respond_to do |format|\... | [
"0.6952404",
"0.693134",
"0.693134",
"0.6847999",
"0.68100935",
"0.6786605",
"0.6731185",
"0.67260826",
"0.6707294",
"0.6671568",
"0.66710526",
"0.66505945",
"0.66400695",
"0.6636899",
"0.6615354",
"0.6615047",
"0.6608686",
"0.6607667",
"0.6606516",
"0.6606462",
"0.66016257",... | 0.6804053 | 5 |
=> Retourne un vrai clone de l'accord | def clone
Chord::new self.to_hash
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clone\n super\n end",
"def clone() end",
"def clone\n end",
"def clone\n end",
"def clone\n end",
"def clone\n super\n end",
"def clone; end",
"def clone; end",
"def clone; end",
"def clone\n end",
"def clone(*) end",
"def clone(*) end",
"def clone\n @clone ||= su... | [
"0.74546385",
"0.7397606",
"0.73529285",
"0.73529285",
"0.73529285",
"0.7342701",
"0.7327746",
"0.7327746",
"0.7327746",
"0.7322227",
"0.7093711",
"0.7093711",
"0.70727116",
"0.69959784",
"0.6974457",
"0.696427",
"0.6865983",
"0.6811762",
"0.6811762",
"0.68043834",
"0.6770732... | 0.64461833 | 27 |
GET /webs GET /webs.json | def index
# @webs = Web.all[0..100]
# @webs = Web.where(urlx: FALSE).where.not(url_ver_date: nil).order("url_ver_date DESC")[0..100]
# @webs = Web.where(urlx: FALSE).
# where.not(url_ver_date: nil).
# order("url_ver_date DESC").
# paginate(:page => params[:page], :per_page => 20)
@webs = Web.where(urlx: FALSE).
where.not(url_ver_date: nil).
order(sort_column + ' ' + sort_direction).
paginate(:page => params[:page], :per_page => 50)
respond_to do |format|
format.html
format.js
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n @web = Web.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @web }\n end\n end",
"def index\n @web_data = WebData.all\n end",
"def web_request\n response_string = RestClient.get('http://www.swapi.co/api/people/')\n resp... | [
"0.68628013",
"0.63853025",
"0.6382949",
"0.6311019",
"0.62302643",
"0.61907417",
"0.6185653",
"0.617709",
"0.6166561",
"0.61606383",
"0.61360353",
"0.61353517",
"0.6105999",
"0.60638416",
"0.605097",
"0.5969948",
"0.59571475",
"0.5934651",
"0.5934212",
"0.59248626",
"0.59230... | 0.0 | -1 |
GET /webs/1 GET /webs/1.json | def show
respond_to do |format|
format.html
format.js
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n @web = Web.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @web }\n end\n end",
"def show\n @web_app = WebApp.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render js... | [
"0.7459528",
"0.6844274",
"0.6693412",
"0.6692964",
"0.646974",
"0.64299685",
"0.6338626",
"0.6321607",
"0.62605476",
"0.6235527",
"0.6231753",
"0.61801666",
"0.6178771",
"0.61171895",
"0.60300744",
"0.60118186",
"0.59711516",
"0.5927276",
"0.5914978",
"0.590498",
"0.590498",... | 0.0 | -1 |
POST /webs POST /webs.json | def create
# Creates a Web from the strong-parameters hash (web_params).
# On success: redirect (html) or 201 with the record (json).
# On failure: re-render the new form (html) or 422 with errors (json).
@web = Web.new(web_params)
respond_to do |format|
if @web.save
format.html { redirect_to @web, notice: 'Web was successfully created.' }
format.json { render :show, status: :created, location: @web }
else
format.html { render :new }
format.json { render json: @web.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n params[:url_list].each do |url|\n WebUrl.new(:url => url).save\n end\n render :json=>params[:url_list].to_json\n end",
"def create\n @webtoon = Webtoon.new(webtoon_params)\n\n respond_to do |format|\n if @webtoon.save\n format.html { redirect_to @webtoon, notice: 'We... | [
"0.63210946",
"0.5958433",
"0.5936645",
"0.5894691",
"0.5861048",
"0.58557266",
"0.58455706",
"0.5697086",
"0.5642276",
"0.5636574",
"0.5621937",
"0.55018014",
"0.54857093",
"0.5477344",
"0.5440458",
"0.5398539",
"0.5395116",
"0.5392223",
"0.538897",
"0.53792363",
"0.5373494"... | 0.6659848 | 0 |
PATCH/PUT /webs/1 PATCH/PUT /webs/1.json | def update
# Updates an existing Web with the whitelisted attributes.
# NOTE(review): @web is presumably set by a before_action outside this
# chunk — confirm. On validation failure, html re-renders :edit and
# json returns 422 with the error details.
respond_to do |format|
if @web.update(web_params)
format.html { redirect_to @web, notice: 'Web was successfully updated.' }
format.json { render :show, status: :ok, location: @web }
else
format.html { render :edit }
format.json { render json: @web.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n @web = Web.find(params[:id])\n\n respond_to do |format|\n if @web.update_attributes(params[:web])\n format.html { redirect_to @web, notice: 'Web was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n ... | [
"0.6730545",
"0.6436215",
"0.63101006",
"0.6219655",
"0.61617935",
"0.61148036",
"0.60678625",
"0.6048004",
"0.59634805",
"0.59534997",
"0.5946364",
"0.5923348",
"0.5905854",
"0.5894249",
"0.58909994",
"0.587048",
"0.58527076",
"0.5851738",
"0.5850731",
"0.58336055",
"0.58329... | 0.64336544 | 2 |
DELETE /webs/1 DELETE /webs/1.json | def destroy
# Deletes the Web record (loaded by a before_action — assumed, the
# declaration is outside this chunk) and redirects to the index (html)
# or returns 204 No Content (json).
@web.destroy
respond_to do |format|
format.html { redirect_to webs_url, notice: 'Web was successfully destroyed.' }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @web = Web.find(params[:id])\n @web.destroy\n\n respond_to do |format|\n format.html { redirect_to webs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @web_app = WebApp.find(params[:id])\n @web_app.destroy\n\n respond_to do |format|\n form... | [
"0.76778334",
"0.713998",
"0.7040803",
"0.7021795",
"0.6971132",
"0.69428355",
"0.69316286",
"0.69179",
"0.68594915",
"0.68535453",
"0.68518025",
"0.6830644",
"0.681978",
"0.6812523",
"0.68051267",
"0.6794138",
"0.6793108",
"0.6793108",
"0.6793108",
"0.6793108",
"0.67692804",... | 0.7401935 | 1 |
Use callbacks to share common setup or constraints between actions. | def set_web
# before_action helper: loads the Web identified by params[:id] into
# @web. `find` raises ActiveRecord::RecordNotFound (rendered as 404 by
# Rails) when the id does not exist.
@web = Web.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_... | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576"... | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def web_params
# Strong-parameters whitelist for Web create/update.
# NOTE(review): :created_at and :updated_at are permitted, which lets a
# client overwrite the Rails-managed timestamps — confirm this is
# intentional; normally these are excluded from mass assignment.
params.require(:web).permit(:url, :url_ver_sts, :sts_code, :url_ver_date, :tmp_sts, :temp_name, :tmp_date, :slink_sts, :llink_sts, :stext_sts, :ltext_sts, :pge_date, :as_sts, :as_date, :cs_sts, :cs_date, :created_at, :updated_at)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n... | [
"0.6980957",
"0.6783065",
"0.6747844",
"0.6741468",
"0.67356336",
"0.6592548",
"0.65036845",
"0.64978707",
"0.64825076",
"0.64795035",
"0.64560914",
"0.64397955",
"0.6379666",
"0.6376688",
"0.6366702",
"0.6319728",
"0.6300833",
"0.6300629",
"0.6294277",
"0.6293905",
"0.629117... | 0.0 | -1 |
Background A Palindrome is a word or phrase which reads the same backward or forward, such as madam or kayak. Implement a Ruby method palindrome? that checks if a given word is a palindrome This method should take one argument (word), a String, and return a Boolean (true of false), telling us if the given word is a palindrome or not You can assume the one argument is a single word It should not be affected by capital letters: palindrome?("racecar") should return true palindrome?("wagon") should return false Resposta: | def palindrome?(word)
# Case-insensitive palindrome check: true when the downcased word reads
# the same forwards and backwards. Assumes a single word — spaces and
# punctuation are not stripped. (The explicit `return` is redundant in
# Ruby but kept as written.)
return word.downcase == word.downcase.reverse
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def palindrome_word?(word)\n # determines whether a single word is a palindrome\n # ignores case\n # returns true or false\n if word.downcase == word.downcase.reverse\n true\n else false\n end\n\nend",
"def palindrome_word?(word)\n normalized = word.downcase\n normalized.reverse == normalized\nend",
... | [
"0.8780466",
"0.87546605",
"0.8754099",
"0.87351",
"0.8728074",
"0.87151134",
"0.8710223",
"0.87019724",
"0.8690443",
"0.8677623",
"0.86329633",
"0.86141616",
"0.8607497",
"0.8575631",
"0.8565919",
"0.856155",
"0.85587114",
"0.85578465",
"0.8539662",
"0.8517202",
"0.849367",
... | 0.85925263 | 13 |
GET /records/new GET /records/new.json | def new
# Intentionally empty: by Rails convention this renders the action's
# `new` view template. NOTE(review): unlike typical scaffolds, no
# @record = Record.new is set up here — confirm the form does not need it.
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @record = Record.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @record }\n end\n end",
"def new\n @record = Record.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @record }\n end\n... | [
"0.7878653",
"0.78784484",
"0.7804585",
"0.7370897",
"0.73114866",
"0.72440517",
"0.7236424",
"0.71809167",
"0.7126915",
"0.710027",
"0.7074209",
"0.7068328",
"0.70324624",
"0.70223385",
"0.6989631",
"0.69868344",
"0.6939417",
"0.68858355",
"0.6875809",
"0.6875809",
"0.686820... | 0.0 | -1 |
POST /records POST /records.json | def create
# Creates or updates a high-score Record from a JSON-encoded
# params[:model] string. Keeps only the best entry: a higher score wins,
# and on a score tie the lower (faster) time wins.
respond_to do |format|
begin
# params[:model] is expected to be a JSON object string; any parse
# failure (bare rescue catches StandardError, incl. JSON::ParserError)
# drops into the rescue branch below.
model = JSON.parse( params[:model] )
rescue
# NOTE(review): @record is nil in this branch (nothing set it), so
# @record.errors in the json response would raise NoMethodError —
# confirm whether a before_action assigns @record, otherwise fix.
format.html { render action: "new" }
format.json { render json: @record.errors, status: :unprocessable_entity }
else
#@record = Record.where( :email => model["email"] ).first
# Strip the CSRF token in case the client echoed it inside the payload,
# so it is not mass-assigned to the model.
model.delete('authenticity_token')
# NOTE(review): with the lookup above commented out, @record is always
# nil here unless a before_action outside this chunk sets it — so the
# update path below may be dead code; verify.
if @record.nil?
@record = Record.new(model)
@record.save
else
# Replace the stored record only if the new score beats it, or ties
# it with a faster time. update_attributes is the pre-Rails-6 alias
# of update.
if @record.score < model["score"] || ( @record.score == model["score"] && @record.time > model["time"] )
@record.update_attributes(model)
end
end
format.html { redirect_to @record, notice: 'Record was successfully created.' }
format.json { render json: @record, status: :created, location: @record }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @record = Record.new(record_params)\n \n if @record.save\n render json: @record\n else\n render error: {error: \"Unable to create record\"}, status: :400\n end\n end",
"def create\n @record = Record.new(params[:record])\n\n respond_to do ... | [
"0.7008424",
"0.6489232",
"0.63915163",
"0.6380105",
"0.6365723",
"0.633085",
"0.6324324",
"0.6312938",
"0.6291036",
"0.6254665",
"0.62195176",
"0.60934305",
"0.60851955",
"0.60323256",
"0.60116625",
"0.5957577",
"0.594642",
"0.591192",
"0.59088993",
"0.5867076",
"0.5854219",... | 0.5931146 | 17 |
Use callbacks to share common setup or constraints between actions. | def set_address
# before_action-style helper: initializes a blank Addresslanlng into
# @address for the view/form.
# NOTE(review): "Addresslanlng" looks like a misspelled model name
# (AddressLang / AddressLanLng?) — confirm against app/models before
# considering a rename.
@address = Addresslanlng.new
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_... | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576"... | 0.0 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.