query
stringlengths
7
9.55k
document
stringlengths
10
363k
metadata
dict
negatives
listlengths
0
101
negative_scores
listlengths
0
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
remove_punc takes a string containing text and removes all the punctuation from it in order to finally return a list of words/tokens in the text
# remove_punc takes a string containing text and removes all the
# punctuation from it, returning the list of lower-cased word tokens.
#
# text - the String to tokenise (re-encoded to UTF-8 if invalid).
#
# Returns an Array of String tokens with empty entries removed.
def remove_punc(text)
  # Checking for correct encoding and reencoding the string if necessary:
  # round-trip through UTF-16 replaces invalid byte sequences with "?"
  unless text.valid_encoding?
    text = text.encode("UTF-16be", :invalid=>:replace, :replace=>"?").encode('UTF-8')
  end
  # Removing punctuation by splitting on the delimiter characters
  words = text.split(/[ ,;{}`~!@#$%^&*<>.:"'|?\\()_+=\/\[\]\-]/)
  # Keep the leading word characters of each piece, lower-cased, and
  # drop the empty tokens produced by runs of consecutive delimiters
  words.map { |word| word[/\w*/].downcase }.reject(&:empty?)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_punctuation\n gsub /[[:punct:]]/, ''\n end", "def remove_punc_and_add_index(word)\n punc_collection = []\n word.chars.each_with_index do |char, index|\n if char.match?(/[',.\\-]/)\n punc_collection << [char, index]\n end\n end\n punc_collection\nend", "def removeQuot...
[ "0.7275276", "0.7010769", "0.70014066", "0.6991378", "0.6945574", "0.6888791", "0.6863058", "0.68302804", "0.6824321", "0.6711469", "0.6635981", "0.6596482", "0.6585817", "0.6529046", "0.6502719", "0.6480986", "0.64564526", "0.6440277", "0.6398475", "0.63197947", "0.63158095"...
0.7881077
0
function that takes the name of an html file stored on disk, and returns a list of tokens (words) in that file.
# find_tokens reads an HTML file stored on disk and returns the list
# of word tokens it contains.
#
# filename - path of the HTML file to read.
#
# Returns an Array of String tokens.
def find_tokens(filename)
  raw_html = File.read(filename)
  # Strip the markup, then tokenise what remains (punctuation removed)
  remove_punc(parse_html(raw_html))
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def content_tokens\n filename\n end", "def get_html path\n array = []\n\n archivo = File.open(path, 'r:UTF-8')\n archivo.each do |l|\n array.push(l.strip)\n\tend\n archivo.close\n\n return array\nend", "def read_tokens()\n\n # By adding @ to tokens, we're saving it also in the inst...
[ "0.68459404", "0.6549772", "0.63205284", "0.61659604", "0.6140782", "0.611115", "0.593393", "0.59052026", "0.5876549", "0.5870015", "0.581922", "0.58115005", "0.5761115", "0.57432467", "0.5727498", "0.5725638", "0.56991166", "0.56788445", "0.5674697", "0.5656922", "0.565593",...
0.7772613
0
function that takes a list of tokens, and a list (or hash) of stop words, and returns a new list with all of the stop words removed
# remove_stop_tokens removes every stop word from a list of tokens.
#
# The previous implementation deleted elements from +tokens+ while
# iterating over it with a for-loop, which skips the element that
# follows each deletion — consecutive stop words were only partially
# removed. delete_if performs a safe single-pass in-place filter and
# always returns the receiver, preserving the mutate-and-return
# contract of the original.
#
# tokens     - Array of String tokens (mutated in place).
# stop_words - a collection responding to #member? (Array, Hash, Set).
#
# Returns the filtered tokens Array (same object).
def remove_stop_tokens(tokens, stop_words)
  tokens.delete_if { |token| stop_words.member?(token) }
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_stop_words(list)\n list.select {|word| word unless @stopwords.include? word }\n end", "def remove_stop_words(list)\n if @filter_stop_words\n list.select {|word| !@stopwords.include?(word) }\n else\n list\n end\n end", "def removeBlackList words\n\t\tblacklist ...
[ "0.7914966", "0.78771454", "0.73791426", "0.7214798", "0.6798927", "0.679369", "0.6740443", "0.6705289", "0.66708285", "0.6579485", "0.64807314", "0.6468068", "0.644567", "0.6221474", "0.61697197", "0.61367285", "0.6133781", "0.6107613", "0.6104376", "0.60622966", "0.59931505...
0.84956324
0
function that takes a list of tokens, runs a stemmer on each token, and then returns a new list with the stems
# stem_tokens runs a stemmer on each token and returns a new list of
# the stems.
#
# tokens - Array of String tokens.
#
# Returns a new Array containing the stem of each token.
def stem_tokens(tokens)
  tokens.map do |token|
    # Keep only the leading word characters before stemming
    token[/\w*/].stem
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stem_each(ary)\n ary.map { |term| @stemmer.stem(term) }\n end", "def analyze content\n unless content.respond_to? :split\n raise ArgumentError, \"#{content.class} has no #split\"\n end\n content.split(/\\s/).map {|w| @stemmer.stem w }\n end", "def stem(word)\n stems = []...
[ "0.7383151", "0.64352137", "0.63437766", "0.6334966", "0.61397344", "0.5948949", "0.58141166", "0.57272047", "0.5698453", "0.56388116", "0.56388116", "0.5521572", "0.55069983", "0.5476669", "0.5476669", "0.54756314", "0.54756314", "0.5455903", "0.5452764", "0.5451128", "0.544...
0.8478054
0
get_title takes a file name and returns the text within the HTML title tag of the file
# get_title takes a file name and returns the text within the HTML
# title tag of the file.
#
# file_name - path of the HTML file to read.
#
# Returns the title String, or nil (after printing diagnostics) when
# the document has no <title> element.
def get_title(file_name)
  html = File.read(file_name)
  doc = Nokogiri::HTML(html)
  # Use at_css + safe navigation instead of rescuing NoMethodError:
  # at_css("title") is nil for title-less documents, so &.text&.strip
  # yields nil in exactly the case the old rescue handled.
  title = doc.at_css("title")&.text&.strip
  if title.nil?
    # Preserve the original diagnostic output for missing titles
    puts "NoMethodError"
    puts file_name
  end
  title
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_title\n r = %r{((\\d+\\.)+\\s*)(?<title>(.)*)\\.html\\.erb}\n match = r.match(file_name)\n raise BadFilenameException, \"Can't match the file: #{file_name}\" unless match\n t = match[:title].strip\n end", "def title\n CGI::unescape(file_name.to_s).gsub(/\\.\\w+$/, '').titleize\n end"...
[ "0.8598528", "0.8095545", "0.80953485", "0.80056685", "0.79147553", "0.78786373", "0.78640026", "0.7862644", "0.7758443", "0.7724523", "0.75914115", "0.7350282", "0.73062795", "0.72698486", "0.72686887", "0.7233489", "0.72191924", "0.7095204", "0.7095204", "0.70673436", "0.70...
0.80919725
3
get_file_details takes a file name containing the index and returns the data of the file, i.e. its name and url, in a hash table
# get_file_details takes a file name containing the index and returns
# the data of the file, i.e. its name and url, in a hash table.
#
# Each line of the index is whitespace-separated: the first field is
# the document name and the third field is its URL.
#
# Fix: the previous version left debug `puts` statements in place,
# spamming stdout on every call; they have been removed.
#
# file_name - path of the index file.
#
# Returns a Hash of { name => url }.
def get_file_details(file_name)
  File.readlines(file_name).each_with_object({}) do |line, details|
    fields = line.split(" ")
    # fields[0] is the document name, fields[2] its URL
    details[fields[0]] = fields[2]
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def file_list(hash)\n\nend", "def file_list(hash)\n\nend", "def get_file(url); end", "def getFileAt(position)\n require 'rubygems/package'\n require 'zlib'\n\n @files = []\n f = File.new(@filename)\n tar_extract = Gem::Package::TarReader.new(f)\n tar_extract.rewind # The extract has to be r...
[ "0.63250715", "0.63250715", "0.6083411", "0.60423857", "0.6021029", "0.59970486", "0.59480125", "0.5919553", "0.5890083", "0.58740073", "0.5828078", "0.58234", "0.5817403", "0.5815732", "0.5810775", "0.5804879", "0.57983625", "0.57846576", "0.57770556", "0.57633716", "0.57631...
0.6574043
0
index_file takes a file and performs the necessary tasks to index that file in the search engine
# index_file performs the necessary tasks to index one file in the
# search engine: it tokenises the file, records document metadata in
# the global $docindex, then updates the global inverted index
# $invindex with per-document term counts.
#
# file      - path of the file (including the pages_dir prefix).
# pages_dir - directory prefix stripped to form the document's key.
# stopwords - collection of stop words to remove before indexing.
# file_data - Hash mapping document names to their URLs.
def index_file(file, pages_dir, stopwords, file_data)
  # Removing the dir from the file name
  # begin
  actual_name = file.gsub(pages_dir, "")
  # rescue NoMethodError
  # actual_name = badpage.html
  # Resetting the file path
  file_path = ""
  file_path = File.expand_path(".") + "/" + file
  print "Parsing HTML document: " + actual_name + " \n"
  # Finding all the tokens in the file
  tokens = find_tokens(file_path)
  # Getting the page title, word count, and page url
  page_title = get_title(file_path)
  word_count = tokens.length
  page_url = file_data[actual_name]
  # Updating the docindex hash: doc name => [count, title, url]
  $docindex[file.gsub(pages_dir, "")] = [word_count, page_title, page_url]
  # Removing the stop words and getting the stem words in the file
  tokens = remove_stop_tokens(tokens, stopwords)
  tokens = stem_tokens(tokens)
  # Creating the invindex hash table: token => { doc name => count }
  for token in tokens
    begin
      if $invindex.member?(token)
        if $invindex[token].member?(actual_name)
          # Token already seen in this document: bump its count
          $invindex[token][actual_name] += 1
        else
          # First occurrence of the token in this document
          $invindex[token][actual_name] = 1
        end
      else
        # First occurrence of the token anywhere
        $invindex[token] = {actual_name => 1}
      end
      # end
      # rescue NoMethodError
      # puts "NoMethodError"
    end
    #puts file_name
    # title = nil
  end
  #end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open_index_file\n end", "def open_index_file\n @index = File.open(@index_path, 'rb')\n end", "def index\n # Block if this file is currently being indexed by another thread/process\n if indexing?\n while indexing?\n sleep 1\n end\n else\n self.indexing...
[ "0.7740468", "0.7514854", "0.73784924", "0.71144867", "0.7050467", "0.6907303", "0.6863395", "0.6611522", "0.65240586", "0.65150326", "0.6402281", "0.6384633", "0.6300359", "0.62951237", "0.62905693", "0.6271485", "0.6251466", "0.62455493", "0.6238742", "0.6229582", "0.622381...
0.7027357
5
Use callbacks to share common setup or constraints between actions.
# Callback: loads the requested item, scoped to the current team, so
# actions can share this lookup instead of repeating it.
def set_item
  team_scope = @klass.where(team_id: params[:team_id])
  @item = team_scope.find(params[:id])
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_required_actions\n # TODO: check what fields change to asign required fields\n end", "def action_hook; end", "def run_actions; end", "def define_action_hook; end", "def actions; end", "def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_...
[ "0.6163163", "0.6045976", "0.5946146", "0.591683", "0.5890051", "0.58349305", "0.5776858", "0.5703237", "0.5703237", "0.5652805", "0.5621621", "0.54210985", "0.5411113", "0.5411113", "0.5411113", "0.5391541", "0.53794575", "0.5357573", "0.53402257", "0.53394014", "0.53321576"...
0.0
-1
Initialises a new Aspire::Object instance
# Initialises a new Aspire::Object instance.
#
# uri     - the object's URI; normalised to its linked-data form when
#           a factory (with a cache) is available.
# factory - the data factory; may be nil.
def initialize(uri, factory)
  self.factory = factory
  # Normalise the URL to the linked data form when possible
  self.uri = if factory
               factory.cache.linked_data_url(uri)
             else
               uri
             end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize(obj); end", "def initialize object\n\t\tself.object = object\n\tend", "def initialize\n initialize!\n end", "def initialize\n initialize!\n end", "def initialize() end", "def initialize(object, response = nil)\n @object = object\n\n @end_date = object['end_dat...
[ "0.6506553", "0.64798385", "0.64382607", "0.64382607", "0.63833064", "0.6297461", "0.6266121", "0.62586266", "0.62586266", "0.62586266", "0.62398475", "0.6231504", "0.61862123", "0.61862123", "0.6167852", "0.6160523", "0.61445075", "0.6125816", "0.61192876", "0.6094196", "0.6...
0.0
-1
Returns a Boolean property value
# Returns a Boolean property value.
#
# property - the property name to read from data.
# data     - the property data.
# single   - whether to return a single value or an array.
#
# Returns true for any truthy property value, false otherwise.
def get_boolean(property, data, single: true)
  get_property(property, data, single: single) do |value, _type|
    # Coerce truthiness to a strict true/false
    !!value
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bool_property(name)\n val = property(name)\n (val && val == 'true')\n end", "def get_boolean_value\n\t\tend", "def value\n true\n end", "def value\n return @value unless @value.nil?\n @value = value_dec == :bool_value_true\n @value\n end", "def get_boolean_value(f...
[ "0.8717523", "0.8130486", "0.7639224", "0.7368131", "0.7304756", "0.72858953", "0.7130745", "0.71062964", "0.70823854", "0.70806104", "0.70732677", "0.7058283", "0.7040332", "0.7040332", "0.70148027", "0.69839317", "0.6981775", "0.6970699", "0.6967798", "0.69629", "0.6949662"...
0.807006
2
Returns a DateTime instance for a timestamp property
# Returns a DateTime instance for a timestamp property.
#
# property - the property name to read from data.
# data     - the property data.
# single   - whether to return a single value or an array.
#
# Returns the parsed DateTime.
def get_date(property, data, single: true)
  get_property(property, data, single: single) do |timestamp, _type|
    # Parse the raw timestamp string into a DateTime
    DateTime.parse(timestamp)
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def timestamp\n Time.parse( props[ TIMESTAMP_PROP_NAME ] ) if props\n end", "def timestamp\n timestamp_to_datetime(static_data(\"timestamp\"))\n end", "def datetime_timestamp\n return Date.parse(self.date).to_time\n end", "def timestamp\n _timestamp.as_time\n end", "def time...
[ "0.7321838", "0.71905166", "0.7150863", "0.7128203", "0.71263605", "0.7039782", "0.7027968", "0.6959805", "0.69386595", "0.6847868", "0.6846435", "0.68372166", "0.6700522", "0.6693689", "0.662373", "0.6552022", "0.653928", "0.6507476", "0.6507476", "0.64569145", "0.6407585", ...
0.0
-1
Returns the value of a property
# Returns the value of a property.
#
# property - the property name to read from data.
# data     - Hash of property data (may be nil).
# is_url   - whether values should be treated as URLs.
# single   - when true return one value, otherwise an Array.
# block    - optional transformation applied to each raw value.
#
# Returns the (possibly transformed) property value or values.
def get_property(property, data, is_url: false, single: true, &block)
  values = data ? data[property] : nil
  unless values.is_a?(Array)
    # Scalar (or missing) value: wrap in an array if a list is wanted
    value = get_property_value(values, is_url: is_url, &block)
    return single ? value : [value]
  end
  # Multi-valued property: transform each entry in turn
  transformed = values.map do |value|
    get_property_value(value, is_url: is_url, &block)
  end
  single ? transformed[0] : transformed
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_property(property)\n get_compound_value(get_value(property))\n end", "def value\n @property_hash[:value]\n end", "def get_property _property\n send_cmd(\"get_property #{_property}\")\n end", "def get_property(property_name)\n value = get() and value[property_name]\n end", "d...
[ "0.844763", "0.8174971", "0.7957079", "0.7908404", "0.7883862", "0.78775865", "0.7710518", "0.76202506", "0.76007766", "0.7599103", "0.7534369", "0.74720305", "0.7430656", "0.7392318", "0.73604846", "0.735042", "0.731903", "0.7311109", "0.7249772", "0.7119159", "0.70912987", ...
0.6808081
35
Returns a string representation of the APIObject instance (the URI)
# Returns a string representation of the APIObject instance (the URI).
def to_s
  uri.to_s
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def inspect\n \"#<#{self.class}:#{object_id.to_s(16)} #{uri.inspect}>\"\n end", "def inspect\n sprintf(\"#<%s:%#0x URI:%s>\", URI.to_s, self.object_id, self.to_s)\n end", "def inspect\n return sprintf(\n \"#<%s:%#0x URI:%s>\",\n self.class.to_s, self.object_id, self.uri.to_s\...
[ "0.74984515", "0.74076825", "0.72245973", "0.7189676", "0.70629364", "0.7042799", "0.7028747", "0.7028747", "0.69840956", "0.6932379", "0.68574905", "0.6845644", "0.6839828", "0.68012464", "0.6775751", "0.6720985", "0.66213286", "0.6602574", "0.6591272", "0.65530396", "0.6520...
0.69544876
9
Sets the URI of the object
# Sets the URI of the object, stripping any trailing format extension
# (.json, .rdf etc.) so equivalent resources share one canonical URI.
#
# u - the URI String to assign.
def uri=(u)
  extension = File.extname(u)
  @uri = if extension.nil? || extension.empty?
           # No extension to strip: store the URI as given
           u
         else
           # Drop everything from the final extension onwards
           u.rpartition(extension)[0]
         end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def uri=(value)\n @uri = value\n end", "def uri=(value)\n @uri = value\n end", "def uri= new_uri\n @uri = self.class.build_uri new_uri\n end", "def set_uri(base, path)\n @uri = \"#{base}/#{path}/#{self.identifier}\"\n end", "def uri=(uri)\n ...
[ "0.79979783", "0.79979783", "0.7790412", "0.77748376", "0.72962356", "0.71812826", "0.70826846", "0.6975819", "0.6934346", "0.68815947", "0.6829507", "0.6769047", "0.6769047", "0.6769047", "0.6769047", "0.6746032", "0.6729175", "0.6725151", "0.6725106", "0.67051774", "0.67051...
0.67778647
11
Retrieves and transforms the property value
# Retrieves and transforms the property value.
#
# value  - the raw value: a Hash type/value pair or a scalar.
# is_url - whether string values should be treated as URLs.
#
# Yields (value, type) to an optional block and returns its result;
# otherwise returns the (possibly transformed) value.
def get_property_value(value, is_url: false)
  type = nil
  # Hash values are assumed to be a type/value pair
  if value.is_a?(Hash)
    type = value['type']
    value = value['value']
  end
  # String properties get the HTML-stripping / URL transformations
  value = transform(value, is_url: is_url) if value.is_a?(String)
  # Delegate to the caller's block when one was supplied
  block_given? ? yield(value, type) : value
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def value\n @property_hash[:value]\n end", "def property(value)\n merge(property: value.to_s)\n end", "def get_raw_property_value(name)\n return @property_values[name]\n end", "def get_property(property)\n get_compound_value(get_value(property))\n end", "def transform_pr...
[ "0.75339127", "0.7162885", "0.71099174", "0.70154476", "0.685727", "0.67887443", "0.6724902", "0.66939205", "0.66882795", "0.667826", "0.66344774", "0.66299194", "0.6599992", "0.6543717", "0.64816314", "0.64649594", "0.6449784", "0.64259946", "0.6421008", "0.6410926", "0.6410...
0.68610126
4
Removes HTML markup from property values
# Removes HTML markup from property values.
#
# value  - the String to transform.
# is_url - when true, only HTML-escape sequences are decoded (a full
#          HTML strip would mangle URLs).
#
# Returns the cleaned String, or value unchanged when stripping is
# disabled via STRIP_HTML.
def transform(value, is_url: false)
  # URLs: decode HTML-escaped encodings without full HTML-stripping
  return CGI.unescape_html(value) if is_url
  # Stripping disabled: pass the value through untouched
  return value unless STRIP_HTML
  # Strip HTML preserving block-level whitespace. Loofah seems to
  # preserve &amp; &quot; etc., so decode those with
  # CGI.unescape_html, then collapse whitespace runs and trim.
  CGI.unescape_html(Loofah.fragment(value).to_text)
     .gsub(/\s+/, ' ')
     .strip
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_property_value(html, value)\n if value.scalar?\n return html.h(value)\n elsif value.is_a?(HtmlOutput)\n return value.to_html(html)\n elsif value.is_a?(Tilia::Xml::XmlSerializable)\n # There's no default html output for this property, we're going\...
[ "0.65004545", "0.6416042", "0.62413484", "0.6210583", "0.6144749", "0.5946043", "0.5946043", "0.5946043", "0.590286", "0.58979213", "0.5865639", "0.5843693", "0.5795531", "0.5794074", "0.5786897", "0.5749199", "0.5747145", "0.5703195", "0.5694222", "0.5691464", "0.5691464", ...
0.0
-1
Initialize Bigram analysis instance. analysis The main Analysis instance.
# Initialize Bigram analysis instance.
#
# directory - the directory the analysis operates on.
def initialize(directory)
  @directory = directory
  # Bigram lookup table keyed by [word1, word2] pairs
  @table = {}
  # Index of first word => list of second words; unseen keys
  # auto-vivify as fresh empty arrays via the default block
  @index = Hash.new { |hash, key| hash[key] = [] }
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize(*args)\n super\n\n # Save parameters\n if focal_word\n self.focal_word = focal_word.mb_chars.downcase.to_s\n self.focal_word_stem = focal_word.stem\n end\n\n # Extract the stop list if provided\n self.stop_words = []\n ...
[ "0.60718626", "0.56724", "0.5556387", "0.55020416", "0.5490031", "0.5408278", "0.53601986", "0.5353507", "0.5347011", "0.52998775", "0.52591366", "0.5214828", "0.5195328", "0.5185657", "0.51741457", "0.51688296", "0.51621974", "0.51539665", "0.51346606", "0.5134577", "0.51271...
0.0
-1
Get list of bigrams for a given word.
# Get list of bigrams for a given word.
#
# word - the first word of the bigrams to look up.
#
# Returns the Array of second words recorded for +word+.
# NOTE(review): @index appears to be built with a default block that
# inserts an empty array on miss, so this lookup may create an entry
# for unknown words — confirm that is intended.
def [](word)
  @index[word]
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_bigrams(string)\n s = string.downcase\n v = []\n (s.length-1).times{ |i|\n v[i] = s[i...i+2]\n }\n return v\n end", "def matching_bigrams(word1)\n list = @index[word1]\n list.map{ |word2| @table[[word1,word2]] }\n end", "def bigramate(word)\n (0..(word.lengt...
[ "0.72831625", "0.6997435", "0.65989906", "0.6584841", "0.6568877", "0.64586675", "0.63899153", "0.6331027", "0.60479885", "0.584426", "0.5836713", "0.58013535", "0.5772785", "0.5749296", "0.5738618", "0.5575636", "0.5538728", "0.5534428", "0.5527767", "0.5497996", "0.5491881"...
0.0
-1
Add a bigram to the table. If it is already present just count the additional file that contains it.
# Add a bigram to the table. If it is already present just count the
# additional file that contains it.
#
# word1 - first word of the bigram.
# word2 - second word of the bigram.
# file  - optional file in which the bigram was observed.
def add(word1, word2, file=nil)
  key = [word1, word2]
  if @table.key?(key)
    # Known bigram: only record the extra file, when one is given
    @table[key].file!(file) if file
    return
  end
  # First sighting: create the bigram, store it and index it
  entry = Bigram.new(word1, word2)
  @table[key] = entry
  entry.file!(file)
  @index[word1] << word2
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end", "def add(word1, wo...
[ "0.6841609", "0.66230047", "0.6436192", "0.60747945", "0.57095593", "0.56737536", "0.55105084", "0.53605306", "0.53107405", "0.5254298", "0.5250469", "0.52405584", "0.52169746", "0.52101177", "0.5183813", "0.51052535", "0.5077029", "0.50676984", "0.50429875", "0.50318074", "0...
0.6372739
3
Assign bigram. def []=(word1, word2)
# Returns all bigram keys ([word1, word2] pairs) in the table.
def list
  @table.each_key.to_a
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize word1, word2\n \t@word1 = word1\n \t@word2 = word2\n end", "def adv\n @words1 = Word.all.sample(1)[0].word\n @words2 = Word.all.sample(1)[0].word\n end", "def initialize word \n @word = word\n @guesses = ''\n @wrong_guesses = ''\n end", "def populate(words)\n # Creat...
[ "0.67964894", "0.6368523", "0.63531435", "0.6240786", "0.62122756", "0.61751467", "0.6128019", "0.61179775", "0.61072564", "0.6092386", "0.60768235", "0.60227937", "0.5985246", "0.5985246", "0.5985246", "0.5985246", "0.5979332", "0.5971955", "0.5959876", "0.59452933", "0.5934...
0.0
-1
Iterate over bigram table.
# Iterate over the bigram table, yielding each [key, bigram] entry.
def each_entry(&block)
  @table.each(&block)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def each\n\t\t @table.each do |pair, bigram|\n yield(bigram)\n\t\t end\n\t\t end", "def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) d...
[ "0.8577058", "0.6344053", "0.62939036", "0.6023376", "0.596464", "0.58545655", "0.5687088", "0.5656885", "0.56529105", "0.56384695", "0.5635767", "0.5621293", "0.560591", "0.55842483", "0.5555778", "0.5555002", "0.5549917", "0.5517773", "0.54879624", "0.5455607", "0.538293", ...
0.49693602
50
Iterate over each bigram as an instance of Bigram.
# Iterate over each bigram as an instance of Bigram.
def each
  # Keys are [word1, word2] pairs; only the Bigram values are yielded
  @table.each_value { |bigram| yield(bigram) }
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t ...
[ "0.6529484", "0.6284862", "0.58925784", "0.55048925", "0.55033886", "0.5483166", "0.547559", "0.54718196", "0.53993124", "0.53735304", "0.53317696", "0.5310152", "0.5298479", "0.52913725", "0.5232824", "0.52292114", "0.522671", "0.5221761", "0.5211718", "0.52070147", "0.51845...
0.7697235
0
Total number of bigrams.
# Total number of bigrams (the sum of every bigram's occurrence
# count), memoized after the first call.
def total
  @total ||= begin
    running = 0
    each { |bigram| running += bigram.count }
    running
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end", "def total_grams\n...
[ "0.74235284", "0.6953551", "0.6816287", "0.67369366", "0.6519049", "0.6369594", "0.6341712", "0.6339302", "0.63020474", "0.62844026", "0.6257695", "0.62269944", "0.62049586", "0.62049586", "0.61644775", "0.6141836", "0.61363196", "0.613448", "0.6115112", "0.61092526", "0.6082...
0.0
-1
Get a list of second words of bigrams matching the given first word.
# Get the bigrams whose first word matches the given word.
#
# word1 - the first word to match.
#
# Returns an Array of Bigram instances, one per recorded second word.
def matching_bigrams(word1)
  @index[word1].map { |second| @table[[word1, second]] }
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_bigrams(string)\n s = string.downcase\n v = []\n (s.length-1).times{ |i|\n v[i] = s[i...i+2]\n }\n return v\n end", "def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend", "def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i,...
[ "0.6646076", "0.6422299", "0.6274176", "0.6154079", "0.5990007", "0.5912253", "0.5783322", "0.56626743", "0.56262785", "0.5603809", "0.5599354", "0.5597414", "0.5511246", "0.54985464", "0.5465181", "0.54564214", "0.54392844", "0.5434038", "0.5416444", "0.5407946", "0.5404497"...
0.73009706
0
Probability of bigram's occurrence in the corpus.
# Probability of the bigram's occurrence in the corpus.
#
# word1 - a Bigram instance, or the first word of the pair.
# word2 - the second word (ignored when word1 is a Bigram).
#
# Returns the bigram's count over the total count as a BigDecimal.
def probability(word1, word2=nil)
  bigram = (Bigram === word1 ? word1 : get(word1, word2))
  # BigDecimal.new was deprecated in Ruby 2.5 and removed in 2.7+;
  # the Kernel#BigDecimal conversion method works on all versions.
  BigDecimal(bigram.count) / total #size
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end", "def probability_o...
[ "0.73970234", "0.70978487", "0.6869366", "0.68479264", "0.68038917", "0.6757227", "0.6729877", "0.6543553", "0.64418244", "0.6419717", "0.6325295", "0.63038254", "0.62904406", "0.62812465", "0.62567854", "0.62363577", "0.62209475", "0.6212022", "0.62085927", "0.6198233", "0.6...
0.7525785
0
Probability of bigram's occurrence in the corpus.
# Probability of a corpus file containing the bigram.
#
# word1 - a Bigram instance, or the first word of the pair.
# word2 - the second word (ignored when word1 is a Bigram).
#
# Returns the fraction of analysed files containing the bigram.
def file_probability(word1, word2=nil)
  bigram = (Bigram === word1 ? word1 : get(word1, word2))
  # BigDecimal.new was deprecated in Ruby 2.5 and removed in 2.7+;
  # the Kernel#BigDecimal conversion method works on all versions.
  BigDecimal(bigram.files.size) / analysis.files.size
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.count) / total #size\n end", "def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bif...
[ "0.7525785", "0.73970234", "0.70978487", "0.6869366", "0.68479264", "0.68038917", "0.6729877", "0.6543553", "0.64418244", "0.6419717", "0.6325295", "0.63038254", "0.62904406", "0.62812465", "0.62567854", "0.62363577", "0.62209475", "0.6212022", "0.62085927", "0.6198233", "0.6...
0.6757227
6
File weighted probablity of the bigram appearing in the corpus. TODO: Don't count file probability.
# File weighted probability of the bigram appearing in the corpus.
# TODO: Don't count file probability.
def score(word1, word2=nil)
  # File weighting is currently disabled (fixed at 1)
  weight = 1 #file_probability(word1, word2)
  probability(word1, word2) * weight
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def file_probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.files.size) / analysis.files.size\n end", "def file_probability(word, threshold=0)\n word = (Word === word ? word : get(word))\n n = 1 # at least one\n ...
[ "0.76515126", "0.714055", "0.71191", "0.6721609", "0.6526299", "0.6516771", "0.63884306", "0.63025784", "0.61543596", "0.61466616", "0.60794854", "0.6042595", "0.59742594", "0.5968507", "0.5966927", "0.5937904", "0.5923168", "0.5881117", "0.5872214", "0.58579516", "0.5833358"...
0.5987286
12
Get a bigram given both words.
# Get a bigram given both words.
#
# Returns the Bigram stored for the pair, or nil when absent.
def get(word1, word2)
  key = [word1, word2]
  @table[key]
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end", "def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end", "def compare(word1, w...
[ "0.74516493", "0.7052047", "0.6946647", "0.6575785", "0.6469979", "0.631926", "0.6299341", "0.6281823", "0.62817293", "0.62425333", "0.6229352", "0.62130743", "0.6211819", "0.6171229", "0.6164704", "0.6155376", "0.61261", "0.6066925", "0.606135", "0.6058965", "0.6058319", "...
0.0
-1
Scan files counting words and bigrams.
# Scan files, counting words and bigrams.
#
# Reads every bigram source file, splits its text into clause-like
# segments at sentence punctuation, and records each adjacent pair of
# acceptable words via #add. Clause boundaries and invalid words
# reset the chain so no bigram spans them. Progress dots go to stderr.
def scan
  $stderr.print "[bigrams] "
  last = nil
  bigram_files.each do |file|
    $stderr.print "."
    text = File.read(file).gsub("\n", " ")
    # Split at sentence punctuation; pairs never cross these breaks
    states = text.split(/[.,:;?!()"]\s*/)
    states.each do |state|
      state.scan(WORD) do |word|
        word = normalize(word)
        if valid_word?(word)
          # Pair the previous word with this one when both qualify
          if last && good_bigram?(word)
            add(last, word, file)
          end
          last = word
        else
          # An invalid word breaks the bigram chain
          last = nil
        end
      end
      last = nil
    end
    last = nil
  end
  $stderr.puts
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def word_count_a_file(file_path)\nend", "def word_count_a_file(file_path)\n word_count = 0\n f = File.open(file_path, \"r\")\n f.each_line {|line|\n word_count += line.to_s.split.size\n }\n word_count\nend", "def word_count_a_file(file_path)\n\tfile = File.new(file_path,'r')\n...
[ "0.6511316", "0.63263327", "0.6272771", "0.62173015", "0.6199744", "0.6187189", "0.6135399", "0.61326754", "0.6131588", "0.61292493", "0.6123231", "0.60655224", "0.6043474", "0.6039884", "0.60201246", "0.60042804", "0.59944147", "0.59934443", "0.599198", "0.59612584", "0.5914...
0.6242591
3
Check if a given word should be considered an acceptable bigram.
# Check if a given word should be considered an acceptable bigram.
#
# word - the word to test.
#
# Returns false for single-character or blacklisted words, else true.
def good_bigram?(word)
  return false if word.size < 2
  !REJECT_BIGRAMS.include?(word)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def legal_word?(word)\n word.length >= 5 && word.length <= 12\n end", "def is_word?(word)\r\n word = word.downcase\r\n word.each_char { |c| return false if not is_letter?(c) }\r\n !word.empty?\r\n end", "def isAntigram(comparedWord)\n String.chars.each(comparedWord)\n end", "def ...
[ "0.68942183", "0.6835693", "0.6639274", "0.66050166", "0.6576086", "0.65585697", "0.6549153", "0.6535717", "0.6491158", "0.64656204", "0.6460502", "0.6431451", "0.6418429", "0.63703954", "0.6333066", "0.6326551", "0.63233995", "0.6312795", "0.63071555", "0.63029563", "0.62864...
0.8423245
0
This method is accessible for testing only.
# This method is accessible for testing only.
#
# Applies canonical settings to this flag, but only if no flag type
# has been determined yet (the first configuration wins).
#
# canonical_flag_type   - the flag type; only :value triggers the
#                         value-related configuration below.
# canonical_value_type  - value semantics (:optional renders the
#                         label in square brackets).
# canonical_value_label - label used when rendering the value.
# canonical_value_delim - delimiter between the flag and its value.
#
# Returns nil early when flag_type is already set, or when the type
# is not :value.
def configure_canonical(canonical_flag_type, canonical_value_type, canonical_value_label, canonical_value_delim)
  return unless flag_type.nil?
  @flag_type = canonical_flag_type
  return unless canonical_flag_type == :value
  @value_type = canonical_value_type
  # Short-style flags take no "=" delimiter; long-style flags
  # require one when none was given
  canonical_value_delim = "" if canonical_value_delim == "=" && flag_style == :short
  canonical_value_delim = "=" if canonical_value_delim == "" && flag_style == :long
  @value_delim = canonical_value_delim
  @value_label = canonical_value_label
  # Optional values render bracketed, e.g. --flag[=VALUE]
  label = @value_type == :optional ? "[#{@value_label}]" : @value_label
  @canonical_str = "#{str_without_value}#{@value_delim}#{label}"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def private; end", "def private_method\n end", "def refutal()\n end", "def specie; end", "def specie; end", "def specie; end", "def specie; end", "def spec; end", "def spec; end", "def before_setup; end", "def implementation; end", "def implementation; end", "def setup; end", "def...
[ "0.7710882", "0.6613679", "0.63352877", "0.62930155", "0.62930155", "0.62930155", "0.62930155", "0.62759215", "0.62759215", "0.6206897", "0.6175529", "0.6175529", "0.61700815", "0.61700815", "0.61700815", "0.61700815", "0.61700815", "0.61700815", "0.61700815", "0.61700815", "...
0.0
-1
Whether an exact match of the string was found
# Whether an exact match of the string was found.
#
# Returns the value recorded in @found_exact (truthy/falsy).
def found_exact?
  @found_exact
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def matched?(str)\r\n # puts \">>> #{self.matcher.source}\"\r\n # puts \">>> #{str}\"\r\n return false if (!self.implicit && str[0] != \"!\")\r\n if (self.matcher =~ str) != nil\r\n $bot.last_match = $~\r\n $log.info(\"/#{self.matcher.source}/ matched #{str}\")\r\n return true\r\n end...
[ "0.7639058", "0.7596726", "0.7488582", "0.746375", "0.74020594", "0.74020594", "0.7239832", "0.71905303", "0.7163009", "0.714809", "0.7112216", "0.70170546", "0.70159245", "0.70122075", "0.7001217", "0.69997746", "0.697129", "0.696923", "0.696923", "0.696923", "0.696923", "...
0.6872769
33
The number of matches that were found.
# The number of matches that were found.
#
# Returns the Integer count of matched flags.
def count
  @flags.size
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def matched_count\n @results[MATCHED_COUNT]\n end", "def return_count\n return @matchedCount\n end", "def matched_size\n m = @match\n m.to_s.size if (not m.equal?(nil))\n end", "def count\n Jhead.call(\"-c\", @match, @pattern).split(\"\\n\").size\n end", "def exact_match_count\n ...
[ "0.8698235", "0.8089367", "0.7729435", "0.7666182", "0.765437", "0.75609905", "0.7497716", "0.74399954", "0.73002255", "0.70184815", "0.7002051", "0.6974099", "0.6907366", "0.6902071", "0.6899067", "0.6891406", "0.6891183", "0.68873537", "0.6886565", "0.6863194", "0.6807652",...
0.0
-1
Whether a single unique match was found.
def found_unique? @flags.size == 1 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def match?\n false\n end", "def one_result\n matched_combined = @matched_names.ids + @matched_tags.ids +\n @matched_queries.ids + @matched_creator.ids +\n @matched_modifier.ids\n return Asq.find(matched_combined[0]) if matched_combined.length == 1\n fa...
[ "0.6884946", "0.6743467", "0.66666603", "0.6646708", "0.6625602", "0.6497823", "0.6478675", "0.64102274", "0.6392388", "0.6372588", "0.6372588", "0.6372588", "0.6364799", "0.6364799", "0.6321445", "0.6251311", "0.624624", "0.61834055", "0.6135826", "0.61332864", "0.6130879", ...
0.74470997
0
Whether no matches were found.
def not_found? @flags.empty? end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def empty?\n matched.empty?\n end", "def nothing_found?\n @matching_documents.nil? or @matching_documents.entries.empty?\n end", "def nil?\n @matches.nil?\n end", "def matched?\n not @match.equal?(nil)\n end", "def matched?\n !failed?\n end", "def expects_none?\n ...
[ "0.7834877", "0.7826205", "0.75819826", "0.74592906", "0.7218907", "0.71993864", "0.7182467", "0.7114309", "0.70080775", "0.69820714", "0.671407", "0.65172404", "0.650716", "0.64751863", "0.6457733", "0.642819", "0.6427544", "0.64200985", "0.64139336", "0.6396393", "0.6380496...
0.6741005
10
Whether multiple matches were found (i.e. ambiguous input).
def found_multiple? @flags.size > 1 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def match?(given_names); end", "def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end", "def multiple_search?\n\t\t@attempts > 1\n\tend", "def multiple_match?\n acknowledgement_detail = locate_element(@original_body, ACKNOWLEDGEMENT_DETAIL_XPATH)\n ...
[ "0.6815673", "0.6742606", "0.6726413", "0.6694285", "0.6620158", "0.6601435", "0.6599376", "0.6591933", "0.6579682", "0.6571317", "0.64545715", "0.6444007", "0.64392936", "0.6410695", "0.63753486", "0.6371641", "0.6356452", "0.62906396", "0.62896234", "0.628535", "0.6248091",...
0.7021841
0
Return whether the unique match was a hit on the negative (`no`) case, or `nil` if not found or not unique.
def unique_flag_negative? found_unique? ? @flags.first[2] : nil end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def matched?\n not @match.equal?(nil)\n end", "def not_a_match(no)\n if no == false\n @e += 1 \n else nil \n end\n end", "def found_unique?\n @flags.size == 1\n end", "def match?\n false\n end", "def has?(arg)\n !!find(arg)\n end", "def opt_unique?(s...
[ "0.62677383", "0.6039822", "0.58324856", "0.5694558", "0.55955046", "0.5581306", "0.55777735", "0.5427091", "0.5362162", "0.5359665", "0.535418", "0.52943647", "0.52836794", "0.5280835", "0.5261147", "0.524693", "0.5243467", "0.52339256", "0.5231494", "0.5227676", "0.5221369"...
0.68606395
0
Returns an array of the matching full flag strings.
def matching_flag_strings @flags.map do |_flag, flag_syntax, negative| negative ? flag_syntax.negative_flag : flag_syntax.positive_flag end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end", "def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end", "def get_flags(*files)\n matches = []\n begin\n files.each do |f|\n file = File.new(f, 'r')\n while (line = file.gets)\n m = line.match(/(^.*=...
[ "0.7122982", "0.7122982", "0.6619203", "0.649433", "0.60035", "0.60031104", "0.59638727", "0.59401083", "0.5879179", "0.5834999", "0.580956", "0.5784173", "0.5769756", "0.5763876", "0.5746046", "0.56907606", "0.56907606", "0.5672787", "0.56592214", "0.5626698", "0.5591124", ...
0.7852811
0
Whether to include short flags
def include_short? @include_short end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end", "def short_flag_exist?(flag)\n flags.select { |f| f.short == flag }.any?\n end", "def flags\n [long, short].compact\n end", "def short\n @short\n end", "def s...
[ "0.75528914", "0.71379894", "0.6998727", "0.6910547", "0.6707933", "0.65746135", "0.65517926", "0.6400962", "0.6104154", "0.6086827", "0.607279", "0.60326034", "0.60232764", "0.6019786", "0.5986978", "0.5936265", "0.5927495", "0.5918958", "0.58649856", "0.58321273", "0.582236...
0.77422243
0
Whether to include long flags
def include_long? @include_long end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end", "def flags\n [long, negative_long, short].compact\n end", "def flags\n [long, short].compact\n end", "def test_long\n LinuxFortune.long = true\n assert LinuxFortune.long ...
[ "0.7245966", "0.6807072", "0.66324544", "0.6364071", "0.63171667", "0.5945385", "0.5848188", "0.5839984", "0.5796883", "0.5730932", "0.57299966", "0.5712589", "0.5686601", "0.5647302", "0.5640962", "0.5616302", "0.5598856", "0.5587657", "0.5584795", "0.55703974", "0.55698246"...
0.7485818
0
Whether to include negative long flags
def include_negative? @include_negative end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flags\n [long, negative_long, short].compact\n end", "def include_long?\n @include_long\n end", "def flags\n [long, short].compact\n end", "def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end", "def general_purpose_...
[ "0.7005071", "0.64586365", "0.63733387", "0.6301711", "0.6210912", "0.59782505", "0.5870798", "0.58574307", "0.5744593", "0.56635636", "0.5610551", "0.56060874", "0.55889356", "0.5546935", "0.55281043", "0.5511223", "0.54754025", "0.54588413", "0.5454476", "0.5445743", "0.541...
0.5779511
8
Returns candidates for the current completion.
def call(context) results = if @include_short && @include_long && @include_negative @flag.effective_flags else collect_results end fragment = context.fragment results.find_all { |val| val.start_with?(fragment) } .map { |str| Completion::Candidate.new(str) } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def candidates\n players.map(&:candidate).compact\n end", "def candidates(s)\n res = []\n candidates_helper(s, 0, res)\n res\n end", "def find_candidates\n scout(exp: expression, depth: 0)\n end", "def file_candidates\n @file_candidates ||= []\n end...
[ "0.6400225", "0.6264429", "0.6093053", "0.60641855", "0.60383046", "0.59851754", "0.59327793", "0.59178853", "0.5872673", "0.58314174", "0.58277404", "0.5827605", "0.57802844", "0.57467395", "0.5732374", "0.5730715", "0.57088095", "0.5698806", "0.5693608", "0.5651471", "0.554...
0.57600236
13
An array of Flag::Syntax including only short (single dash) flags.
def short_flag_syntax @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end", "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def flags...
[ "0.68840307", "0.68386215", "0.67959255", "0.67517674", "0.66765195", "0.63936436", "0.6337571", "0.6337571", "0.61796635", "0.61295277", "0.6074809", "0.60621995", "0.60491186", "0.6028328", "0.59460676", "0.592124", "0.5807492", "0.578661", "0.5730183", "0.5730183", "0.5650...
0.72635776
0
An array of Flag::Syntax including only long (doubledash) flags.
def long_flag_syntax @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n ...
[ "0.6855186", "0.6651415", "0.66093004", "0.64483005", "0.6438081", "0.6377476", "0.6377476", "0.6302847", "0.6183081", "0.61217344", "0.6095984", "0.6052623", "0.6025771", "0.59871995", "0.5987073", "0.59280264", "0.5864525", "0.5832347", "0.5785188", "0.5709738", "0.5709738"...
0.73501015
0
The list of all effective flags used.
def effective_flags @effective_flags ||= flag_syntax.flat_map(&:flags) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flags\n FLAGS.find_all{ |k,v| (self.Characteristics & k) != 0 }.map(&:last)\n end", "def flags\n @flags ||= Set.new([])\n end", "def flags\n @flags\n end", "def flags\n @flags\n end", "def whitelisted_flags\n flags.select &:allowed\n end", "def flags\n ...
[ "0.71123785", "0.7064988", "0.677054", "0.677054", "0.6765211", "0.6728041", "0.6672082", "0.65038013", "0.65038013", "0.64296246", "0.6422122", "0.6413207", "0.63778657", "0.63652533", "0.6348326", "0.6332696", "0.6223134", "0.6174319", "0.61449414", "0.6109141", "0.6054181"...
0.8119101
0
Look up the flag by string. Returns an object that indicates whether the given string matched this flag, whether the match was unique, and other pertinent information.
def resolve(str) resolution = Resolution.new(str) flag_syntax.each do |fs| if fs.positive_flag == str resolution.add!(self, fs, false, true) elsif fs.negative_flag == str resolution.add!(self, fs, true, true) elsif fs.positive_flag.start_with?(str) resolution.add!(self, fs, false, false) elsif fs.negative_flag.to_s.start_with?(str) resolution.add!(self, fs, true, false) end end resolution end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resolve_flag(str)\n result = Flag::Resolution.new(str)\n flags.each do |flag_def|\n result.merge!(flag_def.resolve(str))\n end\n result\n end", "def find_flag(flag_name, flag_state=nil)\n detect do |flag|\n flag.name == flag_name && (flag_state.nil? || flag.state == ...
[ "0.63491124", "0.5969406", "0.5878499", "0.5723808", "0.57181597", "0.5655273", "0.5599379", "0.54669285", "0.5410632", "0.5387644", "0.5373354", "0.5289001", "0.5254369", "0.5249525", "0.5230608", "0.5222844", "0.5198079", "0.51768315", "0.5154593", "0.51312244", "0.51157", ...
0.61936486
1
A list of canonical flag syntax strings.
def canonical_syntax_strings @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end", "def scm_flags\n @flags.join(\" \")\n end", "def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ')....
[ "0.7066683", "0.69394684", "0.6563283", "0.65382355", "0.65382355", "0.64747995", "0.63405854", "0.63405854", "0.6232747", "0.6224358", "0.61657566", "0.61653584", "0.6146682", "0.6134414", "0.6115361", "0.608132", "0.6080026", "0.60568523", "0.6056155", "0.60351515", "0.6029...
0.8113947
0
Whether this flag is activethat is, it has a nonempty flags list.
def active? !effective_flags.empty? end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flags?\n !@flags.empty?\n end", "def empty?\n flags.empty?\n end", "def complete_flags?\n @complete_flags\n end", "def flagged?\n !(%w(flagged) & flags).empty?\n end", "def flagged?\n !(%w(flagged) & flags).empty?\n end", "def is_flagged?\n return se...
[ "0.7888239", "0.7835755", "0.7354448", "0.72545123", "0.71221733", "0.682791", "0.6746434", "0.6713567", "0.66841596", "0.6674785", "0.66655344", "0.6608056", "0.6527947", "0.6527947", "0.6504524", "0.65022874", "0.64490014", "0.64420986", "0.64420986", "0.64420986", "0.64022...
0.81619895
0
GET /agencyfeed.json Get all the agency feed available
def index @agencyfeeds = AgencyFeed.all render :index, status: :ok end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def feeds\n all.select { |c| c.google_id =~ /^feed/ }\n end", "def index\n @feed_sources = FeedSource.all\n end", "def index\n @feeds = Feed.all\n end", "def index\n @feeds = Feed.all\n end", "def index\n @feeds = Feed.all\n end", "def index\n @feeds = Feed.all\n end",...
[ "0.65294206", "0.64663476", "0.6448661", "0.6448661", "0.6448661", "0.6448661", "0.6448661", "0.63958734", "0.63335794", "0.62580824", "0.62517816", "0.62203056", "0.6194411", "0.6180867", "0.6121181", "0.6120239", "0.61178607", "0.6113262", "0.610972", "0.6106641", "0.610293...
0.768968
0
POST /agencyfeed.json Create agency with params
def create @agencyfeed = AgencyFeed.create! agencyfeed_params render :show, status: :created end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n @agency = Agency.new(agency_params)\n\n if @agency.save\n render json: @agency, status: :created, location: @agency\n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end", "def create\n @agency = Agency.new(agency_params)\n\n if @agency....
[ "0.7524271", "0.74258316", "0.7219422", "0.69869757", "0.6933055", "0.6844494", "0.6835852", "0.66698694", "0.6668519", "0.6613647", "0.660936", "0.6579963", "0.6416047", "0.64136416", "0.63984895", "0.63452095", "0.6283765", "0.62245864", "0.6209959", "0.61748606", "0.608486...
0.7636001
0
PUT/PATCH agencyfeed.json update the configuration of agency and category
def update @agencyfeed.update! agencyfeed_params render :show, status: :ok end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n json_update(category,category_params, Category)\n end", "def UpdateCategory params = {}\n \n APICall(path: 'categories.json',method: 'PUT',payload: params.to_json)\n \n end", "def update!(**args)\n @category = args[:category] if args.key?(:category)\n ...
[ "0.65837145", "0.64573747", "0.6200794", "0.6153669", "0.61454356", "0.60933644", "0.6020954", "0.6004011", "0.5993255", "0.5993255", "0.59900177", "0.5969397", "0.59490365", "0.5918142", "0.5916518", "0.59069264", "0.59056515", "0.5890757", "0.5870441", "0.5852912", "0.58415...
0.69128275
0
GET /agencyfeed/:id/fetch_news.json fetch all the news and update db for a specific agencyfeed.
def fetch_news @news = News.fetch_and_store_news! @agencyfeed render template: 'news/list', status: :ok end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch\n ##\n # an array of { category_id: number, news: array }\n @fetched = News.fetch_and_store_news_from_all_agency_feed!\n render :fetch, status: :ok\n end", "def fetch_data\n rss_provider = RssProvider.find(params[:format])\n unless rss_provider.rss_url.include?(\"indiatvnews...
[ "0.7513578", "0.68720174", "0.68716896", "0.6681916", "0.6586323", "0.6517632", "0.6508943", "0.6480259", "0.64605004", "0.6435497", "0.6354102", "0.6225285", "0.62074596", "0.61961514", "0.61918014", "0.61565053", "0.61554474", "0.61537933", "0.6133251", "0.6123011", "0.6104...
0.81743777
0
returns "STANDARD_CLAIM_PROCESS", "BDD_PROGRAM", or "FDC_PROGRAM" based off of a few attributes in the evss data
def evss_claims_process_type(form526) if form526['bddQualified'] return 'BDD_PROGRAM' elsif form526['standardClaim'] return 'STANDARD_CLAIM_PROCESS' end 'FDC_PROGRAM' end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_system(code)\n return code[2][1][0][1][1][1]\n end", "def proc_name\n data = read_cpuinfo.match(/model name\\s*:\\s*(.+)/)[1]\n\n return data.strip\n end", "def getSecurityEvent( event_id )\r\n\r\nputs case event_id\r\nwhen 4608 \r\n return \"Startup\"\r\nwhen 4609 \r\n ...
[ "0.56413835", "0.5547117", "0.55038613", "0.5410344", "0.54024947", "0.54024947", "0.53961504", "0.5374258", "0.53301316", "0.5328355", "0.52793306", "0.52515835", "0.5239102", "0.52355474", "0.52233654", "0.52154016", "0.52050954", "0.52020055", "0.51988494", "0.5191708", "0...
0.7315199
0
returns either 'Active', 'Reserves' or 'National Guard' based on the service branch
def convert_to_service_component(service_branch) service_branch = service_branch.downcase return 'Reserves' if service_branch.include?('reserves') return 'National Guard' if service_branch.include?('national guard') 'Active' end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def branch_of_service\n SERVICE_BRANCHES[branch_of_service_code]\n end", "def hca_branch_of_service\n HCA_SERVICE_BRANCHES[branch_of_service_code] || 'other'\n end", "def get_branch \n branch = case @os_svninfo['URL']\n when /trunk/ then \"trunk\"\n when /branches\\/private\...
[ "0.7212287", "0.7156464", "0.6361214", "0.619933", "0.60374534", "0.5984082", "0.5964916", "0.59352756", "0.59222555", "0.59166205", "0.5904862", "0.5904862", "0.5887707", "0.5879764", "0.5865017", "0.584003", "0.5736813", "0.5725427", "0.56407464", "0.5610731", "0.5597073", ...
0.8002263
0
def all_mandatory_attributes_are_present? result = true if params[:characteristic_type] == 'work' if params[:client_characteristic]['start_date(1i)'].present? && params[:client_characteristic]['start_date(2i)'].present? sd_month = params[:client_characteristic]['start_date(2i)'].to_i sd_day = 1 params[:client_characteristic]['start_date(3i)'].to_i sd_year = params[:client_characteristic]['start_date(1i)'].to_i params[:client_characteristic][:start_date] = Date.civil(sd_year,sd_month,sd_day) else result = false end if params[:client_characteristic]['end_date(1i)'].present? || params[:client_characteristic]['end_date(2i)'].present? if params[:client_characteristic]['end_date(1i)'].present? && params[:client_characteristic]['end_date(2i)'].present? sd_month = params[:client_characteristic]['end_date(2i)'].to_i sd_day = 1 params[:client_characteristic]['end_date(3i)'].to_i sd_year = params[:client_characteristic]['end_date(1i)'].to_i end_date = Date.civil(sd_year,sd_month,sd_day) params[:client_characteristic][:end_date] = end_date.end_of_month else result = false
def set_hoh_data() li_member_id = params[:household_member_id].to_i @household_member = HouseholdMember.find(li_member_id) @household = Household.find(@household_member.household_id) # @head_of_household_name = HouseholdMember.get_hoh_name(@household.id) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mandatory_work_characteristic(arg_pgu_adults_collection,arg_week_start_date,arg_week_end_date)\n mandatory_characteristic_present = false\n if arg_pgu_adults_collection\n\t arg_pgu_adults_collection.each do |each_client|\n\t\t mandatory_characteristic = ClientCharacteristic.has_mandatory_work_chara...
[ "0.6687193", "0.6550766", "0.6403036", "0.6324402", "0.63084084", "0.6258411", "0.6142323", "0.6122722", "0.6114756", "0.6105565", "0.6075499", "0.60739017", "0.60556495", "0.6054276", "0.6021295", "0.60072136", "0.59912485", "0.5980923", "0.597026", "0.5969717", "0.59487826"...
0.0
-1
application controller will attempt to determine layout based on params or current page unless it is specified on the subclass
def determine_layout return @__layout if @__layout return false if params[:_no_layout] || request.xhr? @__layout ||= current_page.layout.try(:template) if current_page.present? @__layout ||= 'application/default' @__layout end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def layout_for_page\n if params[:layout] == 'none' || params[:layout] == 'false'\n false\n else\n params[:layout] || 'application'\n end\n end", "def determine_layout\n ['show'].include?(action_name) ? 'application' : nil \n end", "def layout\n self.class.layout || @a...
[ "0.71749175", "0.7162006", "0.7027256", "0.6980907", "0.6917183", "0.68840367", "0.6877608", "0.6793758", "0.67735374", "0.67581904", "0.6729416", "0.67260855", "0.6697271", "0.66890544", "0.66890544", "0.6635262", "0.66183877", "0.6584618", "0.6550475", "0.6541904", "0.65337...
0.70369416
2
GET /variant_images GET /variant_images.json
def index @variant_images = VariantImage.all respond_to do |format| format.html # index.html.erb format.json { render json: @variant_images } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n e...
[ "0.72741795", "0.72619855", "0.7065044", "0.7058683", "0.68506616", "0.6846892", "0.6785741", "0.67399204", "0.66757697", "0.6659554", "0.6637402", "0.6637402", "0.6605622", "0.6602113", "0.6601908", "0.65993196", "0.6594103", "0.65900457", "0.6587071", "0.65866446", "0.65840...
0.7974657
0
GET /variant_images/1 GET /variant_images/1.json
def show @variant_image = VariantImage.find(params[:id]) @variant = @variant_image.variant respond_to do |format| format.html # show.html.erb format.js format.json { render json: @variant_image } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end", "def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_o...
[ "0.7800791", "0.7155099", "0.70267254", "0.69889295", "0.6987502", "0.6987502", "0.67943203", "0.6752776", "0.66671115", "0.66671115", "0.66671115", "0.66671115", "0.6665982", "0.6631958", "0.6629154", "0.6613807", "0.6606048", "0.66020113", "0.66002864", "0.65955347", "0.658...
0.7340816
1
GET /variant_images/new GET /variant_images/new.json
def new @variant_image = VariantImage.new @variant = Variant.find(params[:variant_id]) @variant_image.variant_id = @variant.id @product = @variant.product respond_to do |format| format.html # new.html.erb format.js # new.js.erb format.json { render json: @variant_image } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end"...
[ "0.7331562", "0.7331562", "0.7331562", "0.7331562", "0.7331562", "0.7331562", "0.7296701", "0.72743815", "0.7256295", "0.71241343", "0.71211594", "0.7083276", "0.70256233", "0.70135015", "0.69998044", "0.6992117", "0.6989176", "0.6984019", "0.697423", "0.697423", "0.6973233",...
0.7732069
0
POST /variant_images POST /variant_images.json
def create params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts @variant_image = VariantImage.new(params[:variant_image]) @variant = @variant_image.variant @product = @variant.product respond_to do |format| if @variant_image.save format.html { redirect_to @variant, notice: 'Image added successfully.' } format.js { redirect_to @variant_image, notice: 'Image added successfully.' } format.json { render json: @variant_image, status: :created, location: @variant_image } else format.html { render action: "new" } format.js { render action: "new" } format.json { render json: @variant_image.errors, status: :unprocessable_entity } end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_ima...
[ "0.65627486", "0.65037525", "0.6433147", "0.64226496", "0.6414937", "0.6413051", "0.63565314", "0.6343757", "0.6338299", "0.6301123", "0.6272123", "0.62499154", "0.6220626", "0.617524", "0.617524", "0.61609745", "0.6148033", "0.612654", "0.60613704", "0.6023113", "0.6020223",...
0.72633463
0
PUT /variant_images/1 PUT /variant_images/1.json
def update @variant_image = VariantImage.find(params[:id]) respond_to do |format| if @variant_image.update_attributes(params[:variant_image]) format.html { redirect_to @variant_image, notice: 'Variant image was successfully updated.' } format.json { head :ok } else format.html { render action: "edit" } format.json { render json: @variant_image.errors, status: :unprocessable_entity } end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant...
[ "0.685779", "0.6613139", "0.65422946", "0.65422946", "0.64632577", "0.6349292", "0.62428784", "0.62285197", "0.6191053", "0.61872923", "0.6173035", "0.6151853", "0.6150835", "0.6141934", "0.61195475", "0.61183", "0.6104693", "0.6087452", "0.60852027", "0.6080466", "0.60675687...
0.7366369
0
DELETE /variant_images/1 DELETE /variant_images/1.json
def destroy @variant_image = VariantImage.find(params[:id]) @variant = @variant_image.variant @variant_image.destroy respond_to do |format| format.html { redirect_to @variant.product } format.json { head :ok } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy\n #Finds selected image\n @image = Image.find(params[:id])\n #destroy image\n @image.destroy\n respond_to do |format|\n format.html { redirect_to '/admin' }\n format.json { head :ok }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n ...
[ "0.71142924", "0.7108093", "0.7081805", "0.7033761", "0.6999009", "0.69973284", "0.69886255", "0.6982736", "0.69627947", "0.6953001", "0.6953001", "0.6953001", "0.6953001", "0.6953001", "0.6953001", "0.6943084", "0.691388", "0.69135904", "0.69056183", "0.68884355", "0.688669"...
0.76444316
0
Return an absolute path within the working directory. The working directory is determined: value of BREWED_WORKING_DIR env var When run_mode is :daemon, the working dir is state_dir. Otherwise, the current directory.
def working_dir(*path) if _working_dir.nil? @_working_dir = ENV['PROJECT_WORKING_DIR'] if _working_dir != nil @_working_dir = Pathname.new(expand_variables _working_dir) Dir.chdir _working_dir.to_s elsif run_mode == :daemon @_working_dir = state_dir Dir.chdir _working_dir.to_s else @_working_dir = Pathname.getwd end raise "working_dir not a directory: #{_working_dir.safe_s}" unless _working_dir.directory? end [_working_dir, *path].reduce(:+) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def working_dir\n ENV['PWD'] || Dir.pwd\n end", "def working_dir\n @_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)\n end", "def working_dir\n @_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)\n end", "def working_dir\n @_working_dir ||= File.ex...
[ "0.7353413", "0.70150113", "0.70150113", "0.70150113", "0.6645491", "0.6588737", "0.6554053", "0.6401448", "0.6390032", "0.63678426", "0.6304743", "0.6300227", "0.6096617", "0.60856885", "0.6035078", "0.6032209", "0.5986275", "0.5929604", "0.58885217", "0.58795285", "0.587952...
0.735547
0
Provide the absolute path to this Brewed's lib dir.
def libdir() LIBDIR end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lib_path; end", "def lib_path; end", "def lib_path; end", "def _lib_dir\n File.join(get_pref(\"sketchbook.path\"), \"libraries\")\n end", "def lib_dir\n LIB_DIR\n end", "def lib_dir\n File.join(root, 'lib')\n end", "def lib\n File.join(@root, 'lib')\n end", "def path\n ...
[ "0.7990427", "0.7990427", "0.7990427", "0.79743385", "0.7927845", "0.7907323", "0.77938616", "0.7615941", "0.7605704", "0.7605704", "0.7589429", "0.7589429", "0.7515817", "0.7508454", "0.7396474", "0.73448074", "0.72190243", "0.72182333", "0.7168761", "0.7134664", "0.7123414"...
0.79928905
0
Returns the current host's name in canonical form (lowercase with domain information stripped).
def hostname() unless @host.is_str? @host = ENV['HOSTNAME'] @host = `/bin/hostname` unless @host.is_str? raise "Failed to determine current HOSTNAME" unless @host.is_str? @host = @host.downcase.sub(/\..*$/, '').strip raise "Failed to determine current HOSTNAME" unless @host.is_str? end @host = @host.to_sym end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def canonical\n dns_host_name\n end", "def canonical\n dns_name\n end", "def canonicalize_hostname(hostname)\n Addrinfo.getaddrinfo(hostname, nil, nil, nil, nil, Socket::AI_CANONNAME).first.canonname\n end", "def canonical_cname(cname)\n # DNS host names are case-insensitive....
[ "0.8175684", "0.7700664", "0.7595606", "0.7432824", "0.7319022", "0.725847", "0.7250591", "0.7250591", "0.72492355", "0.7109443", "0.7090248", "0.6883639", "0.6878133", "0.6850288", "0.68029255", "0.6796222", "0.67310333", "0.6676832", "0.66764206", "0.6668549", "0.6668549", ...
0.7147614
9
Provide an absolute pathname within the current brewed's directory tree when provided relative path components.
def path(*path) [dir, *path].reduce(:+) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def absolute_path(path, reference = @pwd)\n path = File.expand_path(File.join(reference, path)) unless path.start_with? '/'\n path\n end", "def relative_path_from(from); end", "def abspath path\n if path[0] != '/'\n @homedir + '/' + path\n else\n path\n end\n end", "de...
[ "0.7357174", "0.7348903", "0.73462665", "0.7299915", "0.7296331", "0.72358", "0.71377474", "0.7120952", "0.7062218", "0.704706", "0.70403504", "0.70403504", "0.70402324", "0.7012378", "0.7000483", "0.7000483", "0.6994019", "0.69636405", "0.69636405", "0.69636405", "0.69636405...
0.0
-1
Provide an absolute pathname within the brewed's public directory tree.
def public(*path) _public.nil? ? nil : [_public, *path].reduce(:+) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def absolute_path(path)\n File.join(self.mounting_point.path, 'public', path)\n end", "def public_path=(_arg0); end", "def absolute_url\n domain + path\n end", "def public_path\n # TODO: this might present an attack vector if the file is outside the web_root\n options[:web_r...
[ "0.72411776", "0.6842936", "0.6810786", "0.6754875", "0.6727674", "0.6700948", "0.6620673", "0.65827525", "0.65568286", "0.6503647", "0.6487266", "0.64696175", "0.6449953", "0.64441746", "0.64441746", "0.64441746", "0.63983196", "0.63731486", "0.6365032", "0.6364572", "0.6353...
0.0
-1
Provide the absolute path to the directory containing the log files.
def log(*path) [log_root, *path].reduce(:+) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_path\n case Merb::Config[:log_file]\n when String then File.dirname(Merb::Config[:log_file])\n else Merb.root_path(\"log\")\n end\n end", "def log_path\n case Merb::Config[:log_file]\n when String then File.dirname(Merb::Config[:log_file])\n else Merb.root_path(\"log...
[ "0.8008632", "0.8008632", "0.796843", "0.78381485", "0.7828726", "0.7653239", "0.7590869", "0.7574525", "0.7451976", "0.73812646", "0.7275158", "0.725862", "0.7146819", "0.71467555", "0.7051824", "0.70465666", "0.7016004", "0.6988616", "0.69094336", "0.689888", "0.689093", ...
0.0
-1
Provide an absolute path within the current Brewed's state dir.
def state(*path) [state_dir, *path].reduce(:+) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def state_path(path); end", "def state_file_path\n File.join(RIGHT_LINK_SPEC_HELPER_TEMP_PATH, '__state.js')\n end", "def base_path\n Dir.pwd + \"/\"\n end", "def abspath\n \"#{repo_base_path}/#{self.git_repo_path}\"\n end", "def path\n '/' + path_states.map(&:name)[1..-1].jo...
[ "0.7103151", "0.67575806", "0.66171044", "0.6490168", "0.64491934", "0.6438729", "0.640515", "0.6382533", "0.63662404", "0.6337073", "0.6257011", "0.62559515", "0.6231415", "0.6207783", "0.61874485", "0.6147358", "0.6074683", "0.6069712", "0.60665756", "0.6039887", "0.6025222...
0.57346314
82
=============================================================================== Moon phases and Zodiac =============================================================================== Calculates the phase of the moon. 0 New Moon 1 Waxing Crescent 2 First Quarter 3 Waxing Gibbous 4 Full Moon 5 Waning Gibbous 6 Last Quarter 7 Waning Crescent
def moonphase(time=nil) # in UTC time = pbGetTimeNow if !time transitions = [ 1.8456618033125, 5.5369854099375, 9.2283090165625, 12.9196326231875, 16.6109562298125, 20.3022798364375, 23.9936034430625, 27.6849270496875] yy = time.year-((12-time.mon)/10.0).floor j = (365.25*(4712+yy)).floor + (((time.mon+9)%12)*30.6+0.5).floor + time.day+59 j -= (((yy/100.0)+49).floor*0.75).floor-38 if j>2299160 j += (((time.hour*60)+time.min*60)+time.sec)/86400.0 v = (j-2451550.1)/29.530588853 v = ((v-v.floor)+(v<0 ? 1 : 0)) ag = v*29.53 for i in 0...transitions.length return i if ag<=transitions[i] end return 0 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def moonphase(time) # in UTC\n transitions=[\n 1.8456618033125,\n 5.5369854099375,\n 9.2283090165625,\n 12.9196326231875,\n 16.6109562298125,\n 20.3022798364375,\n 23.9936034430625,\n 27.6849270496875]\n yy=time.year-((12-time.mon)/10.0).floor\n j=(365.25*(4712+yy)).floor + (((ti...
[ "0.7135922", "0.6571321", "0.60677487", "0.5964015", "0.5677756", "0.56502366", "0.56364274", "0.55768466", "0.550457", "0.546058", "0.54316163", "0.54068017", "0.53350204", "0.5332118", "0.5297813", "0.52955884", "0.52828133", "0.52504575", "0.52006185", "0.5193434", "0.5191...
0.72937435
0
Calculates the zodiac sign based on the given month and day: 0 is Aries, 11 is Pisces. Month is 1 if January, and so on.
def zodiac(month,day) time = [ 3,21,4,19, # Aries 4,20,5,20, # Taurus 5,21,6,20, # Gemini 6,21,7,20, # Cancer 7,23,8,22, # Leo 8,23,9,22, # Virgo 9,23,10,22, # Libra 10,23,11,21, # Scorpio 11,22,12,21, # Sagittarius 12,22,1,19, # Capricorn 1,20,2,18, # Aquarius 2,19,3,20 # Pisces ] for i in 0...12 return i if month==time[i*4] && day>=time[i*4+1] return i if month==time[i*4+2] && day<=time[i*4+3] end return 0 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zodiac(month,day)\n time=[\n 1,1,1,31, # The Apprentice\n 2,1,2,28, # The Companion\n 3,1,3,31, # The Beacon\n 4,1,4,30, # The Savage\n 5,1,5,31, # The Prodigy\n 6,1,6,30, # The Martyr\n 7,1,7,31, # The Maiden\n 8,1,8,31, # The Gladiator\n 9,1,9,30, # The ...
[ "0.7116016", "0.69404125", "0.6122316", "0.6122316", "0.5967227", "0.59496856", "0.5906849", "0.58797467", "0.58797467", "0.5717164", "0.57162505", "0.57162505", "0.5711818", "0.5694669", "0.5647944", "0.5647944", "0.5646241", "0.5644094", "0.56300116", "0.55933905", "0.55865...
0.72031826
0
Returns the opposite of the given zodiac sign. 0 is Aries, 11 is Pisces.
def zodiacOpposite(sign) return (sign+6)%12 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def negative\n abs.flip_sign\n end", "def reverse_sign(an_integer)\n return 0 - an_integer\nend", "def reverse_sign(an_integer)\n return 0 - an_integer\nend", "def opposite(number)\r\n return number * (-1)\r\nend", "def zodiacValue(sign)\n return (sign)%12\nend", "def opposite(number)\n re...
[ "0.6672668", "0.6554936", "0.6554936", "0.65460396", "0.6495725", "0.64195913", "0.6415371", "0.641393", "0.6412919", "0.6399172", "0.6367187", "0.63448083", "0.62792665", "0.6030122", "0.60131836", "0.59967226", "0.5991377", "0.5991377", "0.59625506", "0.59225416", "0.592254...
0.78304255
0
0 is Aries, 11 is Pisces.
def zodiacPartners(sign) return [(sign+4)%12,(sign+8)%12] end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cardinal; end", "def kcallipidos\n\t\t\t@lipidos * 9\n\t\tend", "def rentas\n profesion ? 1 : 0\n end", "def pontosStrike index\n\t\tif @rolls[index + 2]\n\t\t\tif @rolls[index + 2] == 10\n\t\t\t\tif @rolls[index + 4]\n\t\t\t\t\t10 + 10 + @rolls[index + 4]\n\t\t\t\telse\n\t\t\t\t\t10 + 10\n\t\t\t\ten...
[ "0.61682594", "0.5817779", "0.5750655", "0.56960154", "0.5677398", "0.5660142", "0.56399095", "0.5547494", "0.5535588", "0.5528255", "0.5483012", "0.54519147", "0.5434948", "0.54230285", "0.541586", "0.5403975", "0.53857213", "0.5367293", "0.5358745", "0.53388107", "0.5316248...
0.0
-1
0 is Aries, 11 is Pisces.
def zodiacComplements(sign) return [(sign+1)%12,(sign+11)%12] end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cardinal; end", "def kcallipidos\n\t\t\t@lipidos * 9\n\t\tend", "def rentas\n profesion ? 1 : 0\n end", "def pontosStrike index\n\t\tif @rolls[index + 2]\n\t\t\tif @rolls[index + 2] == 10\n\t\t\t\tif @rolls[index + 4]\n\t\t\t\t\t10 + 10 + @rolls[index + 4]\n\t\t\t\telse\n\t\t\t\t\t10 + 10\n\t\t\t\ten...
[ "0.61682594", "0.5817779", "0.5750655", "0.56960154", "0.5677398", "0.5660142", "0.56399095", "0.5547494", "0.5535588", "0.5528255", "0.5483012", "0.54519147", "0.5434948", "0.54230285", "0.541586", "0.5403975", "0.53857213", "0.5367293", "0.5358745", "0.53388107", "0.5316248...
0.0
-1
=============================================================================== Days of the week ===============================================================================
def pbIsWeekday(wdayVariable,*arg) timenow = pbGetTimeNow wday = timenow.wday ret = false for wd in arg ret = true if wd==wday end if wdayVariable>0 $game_variables[wdayVariable] = [ _INTL("Sunday"), _INTL("Monday"), _INTL("Tuesday"), _INTL("Wednesday"), _INTL("Thursday"), _INTL("Friday"), _INTL("Saturday")][wday] $game_map.need_refresh = true if $game_map end return ret end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def day_of_week\n dnum = day\n dnum -= 10 if dnum > 20\n dnum -= 10 if dnum > 10\n dnum -= 1\n dnum\n end", "def day_of_week(date)\n 7 - date.cwday\n end", "def dayOfWeek\n days = 0;\n tempYear = MIN_YEAR\n while tempYear < @year\n days += SimpleDate.da...
[ "0.81641483", "0.78555655", "0.7836805", "0.7820718", "0.7798211", "0.7739801", "0.77106684", "0.7653655", "0.76395094", "0.756583", "0.754693", "0.75292236", "0.7491989", "0.7478662", "0.7452789", "0.7443966", "0.7425835", "0.73917234", "0.73585266", "0.7314526", "0.7245878"...
0.0
-1
check to see if line item exists in the inventories if it does mark the 3pl on the line_item
def check_line_item_in_inventory(line_item) # feature flipper if Features.inactive?(:refulfill) return true end found = false if rii = find_match_and_decrement_available(line_item) if line_item.order.shipping_address.country.name == 'United States' && rii.vendor == 'bergen' line_item.return_inventory_item = rii line_item.refulfill_status = 'new' found = true line_item.save elsif line_item.order.shipping_address.country.name == 'Australia' && rii.vendor == 'next' line_item.return_inventory_item = rii line_item.refulfill_status = 'new' found = true line_item.save end end found end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_line_item?\n true\n end", "def has_line_items?\n line_items.any?\n end", "def line_item_items_exist_in_inventory\n self.line_items.each do |line_item|\n next unless line_item.item\n inventory_item = self.from.inventory_items.find_by(item: line_item.item)\n if inventory_item.ni...
[ "0.7575127", "0.72356254", "0.71560377", "0.7098415", "0.7065873", "0.7059247", "0.7028707", "0.6990496", "0.69901574", "0.6968333", "0.6947016", "0.69288427", "0.6928456", "0.6886449", "0.68685293", "0.68636286", "0.6862128", "0.6854091", "0.68417156", "0.68411756", "0.68411...
0.7079307
4
match by upc first, then try matching via properties, returns return_inventory_item or nil
def find_match_and_decrement_available(line_item) gs = Orders::LineItemPresenter.new(line_item).global_sku if rii = ReturnInventoryItem.where(["upc= ? and active = true and available > 0", gs&.id]).first rii.available -= 1 rii.save elsif gs #do this check since global skus are jacked up and can't be trusted gs = GlobalSku.where( style_number: gs.style_number, product_name: gs.product_name, size: gs.size, color_id: gs.color_id, customisation_id: gs.customisation_id, height_value: gs.height_value, product_id: gs.product_id, ).first if rii = ReturnInventoryItem.where(["upc = ? and active = true and available > 0", gs&.id]).first rii.available -= 1 rii.save end end rii end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_item_by_product(product, items)\n\t\tsku = product.sap_sku.to_s.upcase\n\t\tsku_options = sku.match(/\\-?[MV]$/) ? [sku, sku.gsub(/\\-?[MV]$/, '')] : sku_options = [\"#{sku}-V\", \"#{sku}-M\", \"#{sku}V\", \"#{sku}M\", sku]\n\n\t\titem = nil\n\t\tsku_options.each do |s|\n\t\t\tif item = items[s]\n\t\t\t\t...
[ "0.5888857", "0.5794193", "0.57222027", "0.55928975", "0.55855024", "0.5517034", "0.5466443", "0.5454543", "0.54469365", "0.5445379", "0.54404634", "0.5355519", "0.5347431", "0.534116", "0.53241557", "0.52610016", "0.52512985", "0.5249569", "0.5247355", "0.52398074", "0.52338...
0.56648356
3
unmark a lineitem for refulfillment, chose not to increment the inventory count due possibility that inventory was refreshed, better to err on side of less inventory
def unrefulfill_line_item(line_item_id) li = Spree::LineItem.find(line_item_id) li.refulfill = nil li.save end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decrement_line_item_quantity(line_item_id)\n current_item = line_items.find(line_item_id)\n if current_item.quantity > 1\n current_item.quantity -= 1\n else\n current_item.destroy\n end\n current_item\n end", "def recover_usage(quantity_to_be_recovered)\n self.used_quantity -= qu...
[ "0.6766502", "0.6419233", "0.6412429", "0.63957113", "0.6360755", "0.63582975", "0.6263182", "0.6224137", "0.6224137", "0.6195032", "0.61344975", "0.6108307", "0.60007876", "0.59919155", "0.59865284", "0.5963751", "0.59574777", "0.5936173", "0.5917403", "0.59152454", "0.58837...
0.6989728
0
Cookbook Name:: tieredchefserver Libraries:: helpers
def server_file(uri) require 'pathname' require 'uri' Pathname.new(URI.parse(uri).path).basename.to_s end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_helper(node)\n Chef::RemoteRecipe.factory(node)\n end", "def initialSSHTasks(ssh)\n win_env_fix = %q{echo 'export PATH=\"$PATH:/cygdrive/c/opscode/chef/embedded/bin\"' > \"$HOME/chef-client\"; echo 'prev_dir=\"`pwd`\"; for __dir in /proc/registry/HKEY_LOCAL_MACHINE/SYSTEM/CurrentCo...
[ "0.6365945", "0.6294414", "0.6217326", "0.6217326", "0.6217326", "0.62059665", "0.6134021", "0.5994452", "0.598766", "0.5887639", "0.5840201", "0.58226264", "0.5754595", "0.5734258", "0.57215744", "0.571388", "0.5710585", "0.5701464", "0.5689198", "0.56596404", "0.5651977", ...
0.0
-1
G => E | v F => B => A C => D
def topo_sort(dependencies) # tarjan's algorithm dependencies.default = [] # no need for #default_proc because array never gets mutated seen = {} ordering = [] dependencies.keys.each do |vertex| resolve!(vertex, dependencies, ordering, seen) unless seen[vertex] end ordering end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gamma_decomposition\r\n k = 0; comparability=true; classification={}\r\n edges.map {|edge| [edge.source,edge.target]}.each do |e|\r\n if classification[e].nil?\r\n k += 1\r\n classification[e] = k; classification[e.reverse] = -k\r\n comparability &&= gratr_comparabil...
[ "0.518043", "0.5179151", "0.51606786", "0.5020858", "0.5017522", "0.49492475", "0.49427435", "0.49024993", "0.48651722", "0.48567945", "0.4855347", "0.48414737", "0.48189622", "0.48189622", "0.47811005", "0.47811005", "0.47726956", "0.47606915", "0.47451782", "0.4734499", "0....
0.0
-1
Get the current rockreleasebased prefix for rock packages
def rock_release_prefix(release_name = nil) release_name ||= rock_release_name if release_name pkg_prefix_base + "-#{release_name}-" else pkg_prefix_base + "-" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end", "def base_prefix\n Starter::Config.read[:prefix]\n end", "def prefix\n regexify(bothify(fetch('aircraft.prefix')))\n end", "def prefixed_label(package...
[ "0.72332764", "0.67280084", "0.65037626", "0.63198924", "0.6315063", "0.61651635", "0.6145332", "0.6092354", "0.60850835", "0.6037899", "0.6021138", "0.5943009", "0.5934455", "0.5924296", "0.5882006", "0.585637", "0.58560544", "0.58445877", "0.583058", "0.5828399", "0.5825916...
0.7785496
0
Get the current rockreleasebased prefix for rock(ruby) packages
def rock_ruby_release_prefix(release_name = nil) rock_release_prefix(release_name) + "ruby-" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n ...
[ "0.7867165", "0.66621006", "0.6451794", "0.64171606", "0.6342943", "0.6251559", "0.62242717", "0.620227", "0.61056644", "0.6062629", "0.60344064", "0.6009978", "0.5993054", "0.5879808", "0.5875949", "0.5858529", "0.58561265", "0.58542913", "0.5852562", "0.5847498", "0.5840739...
0.76407176
1
The debian name of a package either rock[] or for ruby packages rock[]ruby and the releasename can be avoided by setting with_rock_release_prefix to false
def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil) if pkginfo.kind_of?(String) raise ArgumentError, "method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'" end name = pkginfo.name debianize_name(name, build_type: pkginfo.build_type, with_rock_release_prefix: with_rock_release_prefix, release_name: release_name) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def debian_ruby_name(name, with_rock_release_prefix = true, release_name =...
[ "0.826601", "0.81080985", "0.7972412", "0.77185374", "0.7390493", "0.7352227", "0.6973002", "0.6898783", "0.68203634", "0.6602259", "0.6598977", "0.65763", "0.64230126", "0.64132184", "0.6339344", "0.63144815", "0.6306574", "0.62908", "0.62867904", "0.6243514", "0.62379354", ...
0.776134
3
Create a debian package name from a given plain name according to build type, release name and release_prefix setting
def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name) if build_type == :ruby if with_rock_release_prefix rock_release_prefix(release_name) + "ruby-" + Deb.canonize(name) else pkg_prefix_base + "-ruby-" + Deb.canonize(name) end else if with_rock_release_prefix rock_release_prefix(release_name) + Deb.canonize(name) else pkg_prefix_base + "-" + Deb.canonize(name) end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil)\n if pkginfo.kind_of?(String)\n raise ArgumentError, \"method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'\"\n end\n nam...
[ "0.79385996", "0.7263144", "0.71180815", "0.70813316", "0.7042691", "0.7033252", "0.6661746", "0.6645817", "0.66052526", "0.6584799", "0.65558136", "0.64697367", "0.6411772", "0.6375237", "0.6351443", "0.6343119", "0.63246405", "0.6296833", "0.6254793", "0.62546974", "0.62287...
0.8442843
0
The debian name of a meta package rock[]meta and the releasename can be avoided by setting with_rock_release_prefix to false
def debian_meta_name(name, with_rock_release_prefix = true) if with_rock_release_prefix rock_release_prefix + "meta-" + Deb.canonize(name) else pkg_prefix_base + "meta-" + Deb.canonize(name) end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def rock_release_prefix(release_name = nil)\n release_n...
[ "0.7454804", "0.7094609", "0.7083253", "0.69993", "0.6651239", "0.6450723", "0.6414287", "0.6396047", "0.6259771", "0.62469524", "0.6229925", "0.62116957", "0.6173754", "0.601299", "0.6005974", "0.5993302", "0.5990938", "0.58022726", "0.5784182", "0.578295", "0.57796746", "...
0.8754608
0
The debian name of a package [rock]ruby and the releasename prefix can be avoided by setting with_rock_release_prefix to false
def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil) if with_rock_release_prefix rock_ruby_release_prefix(release_name) + Deb.canonize(name) else "ruby-" + Deb.canonize(name) end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end", "def debian_meta_name(name, with_rock_release_prefix = true)\n ...
[ "0.8155794", "0.7976292", "0.78808445", "0.7630828", "0.7524989", "0.7514469", "0.6860359", "0.68594384", "0.6740509", "0.671844", "0.66456854", "0.65610325", "0.648769", "0.6372021", "0.63600284", "0.6351584", "0.63399094", "0.6310774", "0.63089454", "0.6305904", "0.629034",...
0.82506204
0
Commit changes of a debian package using dpkgsource commit in a given directory (or the current one by default)
def dpkg_commit_changes(patch_name, directory = Dir.pwd, prefix: "apaka-", logfile: nil, include_removal: false ) Dir.chdir(directory) do Packager.debug ("commit changes to debian pkg: #{patch_name}") # Since dpkg-source will open an editor we have to # take this approach to make it pass directly in an # automated workflow ENV['EDITOR'] = "/bin/true" cmd = ["dpkg-source", "--commit"] cmd << "--include-removal" if include_removal cmd << "." cmd << prefix + patch_name if !system(*cmd, [:out, :err] => redirection(logfile,"a"), :close_others => true) raise RuntimeError, "#{self.class}#{__method__}: failed to commit #{patch_name}" end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dpkg_commit_changes(patch_name, directory = Dir.pwd)\n Dir.chdir(directory) do\n Packager.info (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass di...
[ "0.7835676", "0.64839965", "0.5969037", "0.5942226", "0.5895876", "0.58421993", "0.5836047", "0.5763407", "0.57430965", "0.57420176", "0.56336474", "0.55841255", "0.5577395", "0.55551475", "0.55478275", "0.5509306", "0.54801506", "0.54568374", "0.54221076", "0.53568935", "0.5...
0.7725159
1
Generate the debian/ subfolder cindlugin control/rules/install files to prepare the debian package build instructions
def generate_debian_dir(pkginfo, dir, options) options, unknown_options = Kernel.filter_options options, :distribution => nil, :override_existing => true, :patch_dir => nil distribution = options[:distribution] # Prepare fields for template package_info = pkginfo debian_name = debian_name(pkginfo) debian_version = debian_version(pkginfo, distribution) versioned_name = versioned_name(pkginfo, distribution) short_documentation = pkginfo.short_documentation documentation = pkginfo.documentation origin_information = pkginfo.origin_information source_files = pkginfo.source_files upstream_name = pkginfo.name copyright = pkginfo.copyright license = pkginfo.licenses deps = @dep_manager.filtered_dependencies(pkginfo) #debian names of rock packages deps_rock_packages = deps[:rock] deps_osdeps_packages = deps[:osdeps] deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten build_dependencies = dependencies.dup this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture) @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo| name = debian_name(pkginfo) build_dependencies << this_rock_release.packageReleaseName(name) end # To handle postinstall DEFAULT_BUILD_DEPENDENCIES.each do |dep| build_dependencies << dep end DEFAULT_RUNTIME_DEPENDENCIES.each do |dep| dependencies << dep end if pkginfo.build_type == :cmake build_dependencies << "cmake" elsif pkginfo.build_type == :orogen build_dependencies << "cmake" orogen_command = pkginfo.orogen_command elsif pkginfo.build_type == :autotools if pkginfo.using_libtool build_dependencies << "libtool" end build_dependencies << "autotools-dev" # as autotools seems to be virtual... build_dependencies << "autoconf" build_dependencies << "automake" build_dependencies << "dh-autoreconf" elsif pkginfo.build_type == :ruby if pkginfo.is_bundle? 
build_dependencies << "cmake" else raise "debian/control: cannot handle ruby package" end elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package build_dependencies << "cmake" else raise "debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}" end Packager.info "Required OS Deps: #{deps_osdeps_packages}" Packager.info "Required Nonnative Deps: #{deps_nonnative_packages}" dir = cleanup_existing_dir(dir, options) existing_debian_dir = File.join(pkginfo.srcdir,"debian") template_dir = if File.directory?(existing_debian_dir) existing_debian_dir else TEMPLATES end FileUtils.mkdir_p dir Find.find(template_dir) do |path| next if File.directory?(path) template = ERB.new(File.read(path), nil, "%<>", path.gsub(/[^w]/, '_')) rendered = template.result(binding) target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s) FileUtils.mkdir_p File.dirname(target_path) File.open(target_path, "w") do |io| io.write(rendered) end end if options[:patch_dir] whitelist = [ "debian/rules","debian/control","debian/install" ] if patch_pkg_dir(pkginfo.name, options[:patch_dir], whitelist: whitelist, pkg_dir: pkginfo.srcdir, options: patch_options()) Packager.warn "Overlay patch applied to debian folder of #{pkginfo.name}" end end ######################## # debian/compat ######################## compatfile = File.join(dir,"compat") set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install\n run \"bundle exec backup generate:config --config-path=config/backup\" unless File.exists?(\"config/backup/config.rb\")\n template \"general.rb\", \"config/backup/models/general.rb\"\n if File.exists? \".env\"\n append_file \".env\" do\n File.read(F...
[ "0.63612086", "0.62468463", "0.62115836", "0.619243", "0.6188494", "0.61588025", "0.6139201", "0.6111842", "0.6008117", "0.60080624", "0.60006094", "0.59659886", "0.59646237", "0.5957808", "0.5957808", "0.5948403", "0.59406453", "0.59402007", "0.5930946", "0.59274274", "0.592...
0.62992585
1
Generate the debian_dir for a meta package i.e. representing a package set or a full release return [String] the main packages directory
def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: "0.1", distribution: nil) existing_debian_dir = File.join("#{name}-#{version}","debian-meta") template_dir = if File.directory?(existing_debian_dir) existing_debian_dir else TEMPLATES_META end dir = File.join(base_dir, "debian") FileUtils.mkdir_p dir debian_name = debian_meta_name(name) debian_version = "#{version}" if distribution debian_version += '~' + distribution end deps_rock_packages = depends deps_osdeps_packages = [] deps_nonnative_packages = [] Packager.info "Required OS Deps: #{deps_osdeps_packages}" Packager.info "Required Nonnative Deps: #{deps_nonnative_packages}" Find.find(template_dir) do |path| next if File.directory?(path) template = ERB.new(File.read(path), nil, "%<>", path.gsub(/[^w]/, '_')) begin rendered = template.result(binding) rescue puts "Error in #{path}:" raise end target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s) FileUtils.mkdir_p File.dirname(target_path) File.open(target_path, "w") do |io| io.write(rendered) end end return dir end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_debian_dir(pkginfo, dir, options)\n options, unknown_options = Kernel.filter_options options,\n :distribution => nil,\n :override_existing => true,\n :patch_dir => nil\n\n distribution = options[...
[ "0.7038908", "0.6894775", "0.6833736", "0.681512", "0.6646762", "0.6568039", "0.6338617", "0.63289803", "0.62737525", "0.62679255", "0.6170336", "0.61601603", "0.6086764", "0.5999166", "0.59521484", "0.5935174", "0.5926287", "0.5916547", "0.589468", "0.5887863", "0.58775705",...
0.7265215
0
A tar gzip version that reproduces same checksums on the same day when file content does not change Required to package orig.tar.gz
def tar_gzip(archive, tarfile, pkg_time, distribution = nil, logfile: nil) # Make sure no distribution information leaks into the package if distribution and archive =~ /~#{distribution}/ archive_plain_name = archive.gsub(/~#{distribution}/,"") FileUtils.cp_r archive, archive_plain_name else archive_plain_name = archive end Packager.info "Tar archive: #{archive_plain_name} into #{tarfile}" # Make sure that the tar files checksum remains the same by # overriding the modification timestamps in the tarball with # some external source timestamp and using gzip --no-name # # exclude hidden files an directories mtime = pkg_time.iso8601() # Exclude hidden files and directories at top level cmd_tar = "tar --mtime='#{mtime}' --format=gnu -c --exclude '.+' --exclude-backups --exclude-vcs --exclude #{archive_plain_name}/debian --exclude build #{archive_plain_name} | gzip --no-name > #{tarfile}" if system(cmd_tar, [:out,:err] => redirection(logfile, "a")) Packager.info "Package: successfully created archive using command '#{cmd_tar}' -- pwd #{Dir.pwd} -- #{Dir.glob("**")}" checksum = `sha256sum #{tarfile}` Packager.info "Package: sha256sum: #{checksum}" return true else Packager.info "Package: failed to create archive using command '#{cmd_tar}' -- pwd #{Dir.pwd}" return false end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tar_gz_file\n \"#{package_name}.tar.gz\"\n end", "def get_gzipped_backup\n tar_file = get_tempfile\n safe_run \"tar -czf #{tar_file} #{tar_dir}\"\n tar_file\n end", "def compress_source_tgz(path)\n tarfile = Tempfile.create([\"vagrant\", \".tar\"])\n tarfile.close\n t...
[ "0.6814556", "0.6727206", "0.6657132", "0.6571898", "0.6375211", "0.63011515", "0.6265729", "0.62573904", "0.6230874", "0.6166431", "0.6116192", "0.6047838", "0.6046607", "0.60226125", "0.602238", "0.6006378", "0.5954265", "0.5954103", "0.59446114", "0.5941396", "0.59183174",...
0.72869587
0
Package selection is a collection of pkginfo
def package_selection(selection, force_update: nil, patch_dir: nil, package_set_dir: nil, use_remote_repository: false) sync_packages = {} selected_gems = [] selection.each_with_index do |pkginfo, i| pkg_name = pkginfo.name pkg = pkginfo.pkg Autoproj.message "Packaging #{pkg_name} (#{i + 1}/#{selection.size})", :green # Making sure all packages that require base/cmake due to using Rock CMake macros have # a dependency on base/cmake if File.file?(File.join(pkg.srcdir, "CMakeLists.txt")) cmakelists_txt = File.read(File.join(pkg.srcdir, "CMakeLists.txt")) if cmakelists_txt =~ /include\(Rock\)|Rock\.cmake/ || cmakelists_txt =~ /find_package\(Rock\)/ pkg.depends_on "base/cmake" unless pkg.name == "base/cmake" end end begin options = {:force_update => force_update, :patch_dir => patch_dir, :package_set_dir => package_set_dir} if !use_remote_repository options[:existing_source_dir] = pkg.srcdir end # just to update the required gem property selected_gems.concat pkginfo.dependencies[:extra_gems] # Perform the actual packaging package(pkginfo, options) sync_packages[pkg_name] = { :debian_name => debian_name(pkginfo), :build_deps => build_dependencies(pkginfo), :type => :package } rescue Interrupt raise rescue Exception => e Apaka::Packaging.warn "failed to package #{pkg.name}: #{e.message} #{e.backtrace}" next end end [sync_packages, selected_gems.uniq] end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main_package_set\n each_package_set.find(&:main?)\n end", "def relevant_packages\n packages.select { |p| p['version'] == version }\n end", "def expand_package_selection(selection, filter: true)\n result = PackageSelection.new\n\n all_selected_packages = s...
[ "0.68138766", "0.66908056", "0.66548544", "0.6567489", "0.64760077", "0.64440465", "0.63981324", "0.6337504", "0.63027155", "0.6271943", "0.62538314", "0.62475944", "0.6244942", "0.6211936", "0.6166513", "0.6117742", "0.6111971", "0.60975456", "0.6069746", "0.60353863", "0.60...
0.6541327
4
Package the given package unless already registerd in reprepro if an existing source directory is given this will be used for packaging, otherwise the package will be bootstrapped
# Package the given package.
#
# Dispatches to the build-type specific packaging routine after preparing
# the source directory. If an existing source directory is given (via
# options) it will be used for packaging, otherwise the package will be
# bootstrapped.
#
# @param pkginfo package info object (responds to #build_type, #name)
# @param options [Hash] supported keys: :force_update (remove any previous
#   packaging dir first), :patch_dir, :distribution and :architecture
#   (override the global target platform settings); remaining unknown
#   options are forwarded to #prepare_source_dir
# @raise [ArgumentError] for unsupported build types
def package(pkginfo, options = Hash.new)
    options, unknown_options = Kernel.filter_options options,
        :force_update => false,
        :patch_dir => nil,
        :distribution => nil, # allow to override global settings
        :architecture => nil

    # Default the target platform from the global configuration
    options[:distribution] ||= target_platform.distribution_release_name
    options[:architecture] ||= target_platform.architecture
    # NOTE(review): debian_pkg_name is computed but not used in this method
    debian_pkg_name = debian_name(pkginfo)

    if options[:force_update]
        # Discard any previously generated packaging artifacts
        dirname = packaging_dir(pkginfo)
        if File.directory?(dirname)
            Packager.info "Debian: rebuild requested -- removing #{dirname}"
            FileUtils.rm_rf(dirname)
        end
    end

    options[:packaging_dir] = packaging_dir(pkginfo)
    options[:release_name] = rock_release_name

    begin
        # Set the current pkginfo to set the install directory
        # correctly
        # FIXME: needs to be refactored
        @packager_lock.lock
        @current_pkg_info = pkginfo

        pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))

        # Dispatch on the autobuild build type
        if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools
            package_default(pkginfo, options)
        elsif pkginfo.build_type == :ruby
            # Import bundles since they do not need to be build and
            # they do not follow the typical structure required for gem2deb
            if pkginfo.name =~ /bundles/
                package_importer(pkginfo, options)
            else
                package_ruby(pkginfo, options)
            end
        elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package
            package_importer(pkginfo, options)
        else
            raise ArgumentError, "Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}"
        end
    ensure
        # Always reset the current package and release the lock, even on error
        @current_pkg_info = nil
        @packager_lock.unlock
    end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def package\n unless @package\n @package = package_resource_class.new(download_dest, run_context)\n tailor_package_to_platform\n end\n @package\n end", "def bundle_package(*args, &block)\n ruby_package(*args) do |pkg|\n Autoproj.env_add_path 'ROCK_BUNDLE_PATH...
[ "0.67403233", "0.6588731", "0.6479387", "0.64547145", "0.6374927", "0.63533336", "0.6345991", "0.6284882", "0.6275911", "0.6258917", "0.6255152", "0.62313044", "0.6127586", "0.60981727", "0.60876894", "0.6075155", "0.6019445", "0.60131645", "0.6012386", "0.6005589", "0.599253...
0.61835426
12
Package the given meta package if an existing source directory is given this will be used for packaging, otherwise the package will be bootstrapped
# Package the given meta package.
#
# If packaging artifacts for this meta package already exist and
# force_update is set, they are removed first. The packaging directory is
# created on demand before delegating to #package_deb_meta.
#
# @param name [String] meta package name
# @param depend [Array] dependencies of the meta package
# @param version [String] version of the meta package
# @param force_update [Boolean] remove any previously generated packaging dir
# @param distribution [String,nil] target distribution, defaults to the
#   globally configured target platform
# @param architecture [String,nil] target architecture, defaults to the
#   globally configured target platform
def package_meta(name, depend, version: "0.1", force_update: false, distribution: nil, architecture: nil)
    debian_pkg_name = debian_meta_name(name)

    if force_update
        # A forced rebuild starts from a clean packaging directory
        dirname = packaging_dir(debian_pkg_name)
        if File.directory?(dirname)
            Packager.info "Debian: rebuild requested -- removing #{dirname}"
            FileUtils.rm_rf(dirname)
        end
    end

    distribution ||= target_platform.distribution_release_name
    architecture ||= target_platform.architecture

    pkg_dir = packaging_dir(debian_pkg_name)
    FileUtils.mkdir_p(pkg_dir) unless File.directory?(pkg_dir)

    package_deb_meta(name, depend,
                     version: version,
                     distribution: distribution,
                     packaging_dir: pkg_dir)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def package\n unless @package\n @package = package_resource_class.new(download_dest, run_context)\n tailor_package_to_platform\n end\n @package\n end", "def source_package(options)\n package_common(options) do |pkg|\n pkg.srcdir = pkg.name\n yield(pk...
[ "0.68598044", "0.68513906", "0.6561744", "0.632393", "0.624938", "0.62227345", "0.62220913", "0.6191581", "0.619106", "0.61131114", "0.6092626", "0.60886014", "0.6073014", "0.60682124", "0.602863", "0.59846896", "0.59642756", "0.5947927", "0.5936238", "0.5925864", "0.5917892"...
0.561658
51
Update the debian directory with overlay and env.sh/env.yml
# Update the debian directory of a package with the overlay and the
# env.sh/env.yml environment description, then generate the debian source
# package via dpkg-source.
#
# Fix: replaced File.exists? with File.exist? — the former was deprecated
# and removed in Ruby 3.2 (the method already used File.exist? for the
# patch_dir check).
#
# @param pkginfo package info object (responds to #srcdir, #name,
#   #generate_env_data, #envsh)
# @param options [Hash] expects :patch_dir (optional overlay directory),
#   :logfile (log redirection target) and :distribution
# @return [Array<String>] the three generated artifact names:
#   debian.tar.gz, orig.tar.gz and dsc file
# @raise [RuntimeError] if dpkg-source fails
def update_debian_dir(pkginfo, options)
    # Generate the debian directory
    generate_debian_dir(pkginfo, pkginfo.srcdir, options)

    # Apply an overlay patch on top of the generated debian dir, if provided
    if options[:patch_dir] && File.exist?(options[:patch_dir])
        if patch_pkg_dir(pkginfo.name, options[:patch_dir],
                whitelist: nil,
                pkg_dir: pkginfo.srcdir,
                options: patch_options())
            Packager.warn "Overlay patch applied to #{pkginfo.name}"
        end
        Dir.chdir(pkginfo.srcdir) do
            process_apaka_control("apaka.control")
        end
    end
    dpkg_commit_changes("overlay", pkginfo.srcdir,
        logfile: options[:logfile],
        include_removal: true)

    # Merge a pre-existing env.yml (e.g. from the overlay) with the
    # generated environment data and write it back
    envyml = File.join(pkginfo.srcdir, "env.yml")
    Packager.warn("Preparing env.yml #{envyml}")
    patch_yml = {}
    if File.exist?(envyml)
        patch_yml = YAML.load_file(envyml)
    end

    env_data = pkginfo.generate_env_data("APAKA__" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)
    File.open(envyml, "w") do |file|
        file.write(env_data.to_yaml)
    end
    dpkg_commit_changes("envyml", pkginfo.srcdir,
        logfile: options[:logfile])

    # Append the shell representation of the environment to env.sh
    envsh = File.join(pkginfo.srcdir, "env.sh")
    Packager.warn("Preparing env.sh #{envsh}")
    File.open(envsh, "a") do |file|
        env_txt = pkginfo.envsh(env_data)
        file.write(env_txt)
    end
    dpkg_commit_changes("envsh", pkginfo.srcdir,
        logfile: options[:logfile])

    # Run dpkg-source
    # Use the new tar ball as source
    if !system("dpkg-source", "-I", "-b", pkginfo.srcdir,
            [:out, :err] => redirection(options[:logfile], "a"),
            :close_others => true)
        Packager.warn "Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}"
        raise RuntimeError, "Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}"
    end
    ["#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz",
     "#{plain_versioned_name(pkginfo)}.orig.tar.gz",
     "#{versioned_name(pkginfo, options[:distribution])}.dsc"]
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup debian_repository\n write_config debian_repository\n restart\n end", "def etc_update \n announcing 'Running etc-update' do\n system(\"chroot #{$chrootdir} /scripts/run.sh update_configs\")\n end\n send_to_state('build', 'etc_update')\n end", "def apt_update(vm)\n...
[ "0.721814", "0.6581492", "0.6412914", "0.62594265", "0.6220686", "0.61962616", "0.60835385", "0.59578365", "0.5928997", "0.58600885", "0.58563536", "0.57297826", "0.56680995", "0.56155807", "0.5538033", "0.55320096", "0.550211", "0.5484689", "0.54720473", "0.545635", "0.54522...
0.7720927
0
Build the package locally and return the path to the locally built file
# Build a Debian package locally and return the path to the generated .deb.
#
# Unpacks the previously generated debian.tar.gz/xz and orig.tar.gz in the
# package's build directory, moves the debian/ dir into the versioned
# source tree and runs debuild there.
#
# @param pkg_name [String] logical package name (used for error reporting)
# @param debian_pkg_name [String] debian package name, subdirectory of build_dir
# @param versioned_build_dir [String] directory name the sources are unpacked into
# @param deb_filename [String] expected deb filename (logged only)
# @param options [Hash] supported keys: :distributions, :parallel_build_level
#   (forwarded to debuild as -j<level>)
# @return [String] the single generated *.deb file (relative to the build dir)
# @raise [RuntimeError] if any step fails or not exactly one .deb is produced
def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)
    options, unknown_options = Kernel.filter_options options,
        :distributions => nil,
        :parallel_build_level => nil
    filepath = build_dir
    # The manual equivalent of what this method automates:
    # cd package_name
    # tar -xf package_name_0.0.debian.tar.gz
    # tar -xf package_name_0.0.orig.tar.gz
    # mv debian/ package_name_0.0/
    # cd package_name_0.0/
    # debuild -us -uc
    # #to install
    # cd ..
    # sudo dpkg -i package_name_0.0.deb
    Packager.info "Building #{pkg_name} locally with arguments: pkg_name #{pkg_name}," \
        " debian_pkg_name #{debian_pkg_name}," \
        " versioned_build_dir #{versioned_build_dir}" \
        " deb_filename #{deb_filename}" \
        " options #{options}"

    begin
        # Work inside <build_dir>/<debian_pkg_name>/<platform> where the
        # source artifacts were placed ('/' in the platform name is mapped to '-')
        FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub("/","-")) do
            # Start from a clean state: remove leftovers of previous builds
            if File.exist? "debian"
                FileUtils.rm_rf "debian"
            end
            if File.exist? versioned_build_dir
                FileUtils.rm_rf versioned_build_dir
            end
            FileUtils.mkdir versioned_build_dir

            # Unpack the debian dir tarball (gz or xz) into the cwd
            debian_tar_gz = Dir.glob("*.debian.tar.gz")
            debian_tar_gz.concat Dir.glob("*.debian.tar.xz")
            if debian_tar_gz.empty?
                raise RuntimeError, "#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}"
            else
                debian_tar_gz = debian_tar_gz.first
                cmd = ["tar", "-xf", debian_tar_gz]
                if !system(*cmd, :close_others => true)
                    raise RuntimeError, "Packager: '#{cmd.join(" ")}' failed"
                end
            end

            # Unpack the upstream sources directly into the versioned build
            # dir, stripping the tarball's top-level directory
            orig_tar_gz = Dir.glob("*.orig.tar.gz")
            if orig_tar_gz.empty?
                raise RuntimeError, "#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}"
            else
                orig_tar_gz = orig_tar_gz.first
                cmd = ["tar"]
                cmd << "-x" << "--strip-components=1" <<
                    "-C" << versioned_build_dir <<
                    "-f" << orig_tar_gz
                if !system(*cmd, :close_others => true)
                    raise RuntimeError, "Packager: '#{cmd.join(" ")}' failed"
                end
            end

            # debian/ must live inside the source tree for debuild
            FileUtils.mv 'debian', versioned_build_dir + '/'
            FileUtils.chdir versioned_build_dir do
                # -us -uc: do not sign sources or changes
                cmd = ["debuild", "-us", "-uc"]
                if options[:parallel_build_level]
                    cmd << "-j#{options[:parallel_build_level]}"
                end
                if !system(*cmd, :close_others => true)
                    raise RuntimeError, "Packager: '#{cmd}' failed"
                end
            end

            # Exactly one .deb is expected as build result
            filepath = Dir.glob("*.deb")
            if filepath.size < 1
                raise RuntimeError, "No debian file generated in #{Dir.pwd}"
            elsif filepath.size > 1
                raise RuntimeError, "More than one debian file available in #{Dir.pwd}: #{filepath}"
            else
                filepath = filepath.first
            end
        end
    rescue Exception => e
        # NOTE(review): rescue Exception also wraps Interrupt/SystemExit
        # into a RuntimeError — confirm that is intended
        msg = "Package #{pkg_name} has not been packaged -- #{e}"
        Packager.error msg
        raise RuntimeError, msg
    end
    filepath
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_path\n @build_path ||= Pathname.new(source_dir).join(data['build_path'] || './build').to_s\n end", "def build_root()\n \"#{project_root}/build\"\n end", "def output_path\n \"build\"\n end", "def build_path\n @build_path ||= \"#{@ant_path}/#{@config[:build_name]}\"\n end"...
[ "0.7177168", "0.71356404", "0.7130146", "0.7112139", "0.70762694", "0.704427", "0.70293415", "0.6967646", "0.6964182", "0.6778268", "0.6737313", "0.6720476", "0.6702654", "0.66772133", "0.6621196", "0.6613639", "0.6557876", "0.65513676", "0.65442437", "0.65442437", "0.6502655...
0.58402926
86
Install package name, where pkg is the debian package name
# Install a locally built package, where pkg_name is the debian package name.
#
# Locates the single .deb artifact in the package's packaging directory and
# delegates to #install_debfile.
#
# Fixes: closed the unbalanced quote in the error message
# ("...'#{pkg_name} failed..." -> "...'#{pkg_name}' failed..."), and
# Interrupt is now re-raised instead of being wrapped into a RuntimeError,
# consistent with the handling in #package_selection.
#
# @param pkg_name [String] debian package name
# @raise [RuntimeError] if zero or multiple .deb files are found, or the
#   installation itself fails
def install(pkg_name)
    begin
        pkg_build_dir = packaging_dir(pkg_name)
        # Exactly one .deb is expected per packaging directory
        filepath = Dir.glob("#{pkg_build_dir}/*.deb")
        if filepath.size < 1
            raise RuntimeError, "No debian file found for #{pkg_name} in #{pkg_build_dir}: #{filepath}"
        elsif filepath.size > 1
            raise RuntimeError, "More than one debian file available in #{pkg_build_dir}: #{filepath}"
        else
            filepath = filepath.first
            Packager.info "Found package: #{filepath}"
        end
        install_debfile(filepath)
    rescue Interrupt
        # Let Ctrl-C propagate instead of wrapping it
        raise
    rescue Exception => e
        raise RuntimeError, "Installation of package '#{pkg_name}' failed -- #{e}"
    end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package host, package_name\n host.install_package package_name\n end", "def install(pkg)\n package pkg do\n action :install\n end\nend", "def install(packagename, force=false)\n\t\t\t\traise(InstallError, \"Automated package installation is not implemented on OpenBSD\")\n\t\t\tend"...
[ "0.7270667", "0.7244659", "0.7169353", "0.70323354", "0.7023716", "0.6977101", "0.6951155", "0.6922259", "0.6880965", "0.68577975", "0.684019", "0.6804202", "0.6784884", "0.67558724", "0.6749211", "0.6739366", "0.67260814", "0.67077416", "0.67010355", "0.6678488", "0.6654429"...
0.7763115
0
We create a diff between the existing orig.tar.gz and the source directory to identify if there have been any updates. Using 'diff' allows us to apply this test to all kinds of packages
# Check whether the package content changed with respect to the orig.tar.gz
# that is already registered in reprepro.
#
# Any stale local *.orig.tar.gz files are removed first; if reprepro has a
# registered orig.tar.gz for this package it is copied into the current
# working directory and compared against the package sources.
#
# @param pkginfo package info object
# @return [Boolean] true if the package requires an update (no previous
#   orig.tar.gz exists, or its content differs from the current sources)
# @raise [RuntimeError] if multiple orig.tar.gz versions are present locally
def package_updated?(pkginfo)
    # append underscore to make sure version definition follows
    registered_orig_tar_gz = reprepro.registered_files(debian_name(pkginfo) + "_",
            rock_release_name,
            "*.orig.tar.gz")

    # Remove any locally lingering orig.tar.gz so that only the copy pulled
    # from reprepro (if any) remains for comparison
    orig_file_names = Dir.glob("#{debian_name(pkginfo)}*.orig.tar.gz")
    orig_file_names.each do |file|
        FileUtils.rm file
    end

    if registered_orig_tar_gz.empty?
        Packager.info "Apaka::Packaging::Debian::package_updated?: no existing orig.tar.gz found in reprepro"
    else
        Packager.info "Apaka::Packaging::Debian::package_updated?: existing orig.tar.gz found in reprepro: #{registered_orig_tar_gz}"
        FileUtils.cp registered_orig_tar_gz.first, Dir.pwd
    end

    # Find an existing orig.tar.gz in the build directory
    # ignoring the current version-timestamp
    orig_file_name = Dir.glob("#{debian_name(pkginfo)}*.orig.tar.gz")
    if orig_file_name.empty?
        # Nothing to compare against -> the orig.tar.gz must be (re)generated
        Packager.info "No filename found for #{debian_name(pkginfo)} (existing files: #{Dir.entries('.')} -- package requires update (regeneration of orig.tar.gz)"
        return true
    elsif orig_file_name.size > 1
        raise RuntimeError, "Multiple versions of package #{debian_name(pkginfo)} in #{Dir.pwd} -- you have to fix this first"
    else
        orig_file_name = orig_file_name.first
    end

    # Updated exactly when the content differs from the registered tarball
    !equal_pkg_content?(pkginfo, orig_file_name)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def package_updated?(pkg)\n # Find an existing orig.tar.gz in the build directory\n # ignoring the current version-timestamp\n orig_file_name = Dir.glob(\"#{debian_name(pkg)}*.orig.tar.gz\")\n if orig_file_name.empty?\n return true\n ...
[ "0.69058627", "0.6773206", "0.6525139", "0.6017702", "0.5980599", "0.5980599", "0.5971509", "0.5917137", "0.5833289", "0.57902545", "0.5734645", "0.56653184", "0.5664062", "0.56551045", "0.56526834", "0.5645943", "0.5645111", "0.5638795", "0.56369454", "0.56161374", "0.561372...
0.6427643
3
Compute the ruby arch setup. For passing through sed, escaping is required; for use with file rendering, no escaping is required
# Compute the ruby architecture setup (Makefile variable definitions).
#
# Produces the variable block that extracts the platform multiarch triplet
# and the default Ruby version/paths via RbConfig. With do_escape the
# result is escaped so it can safely be passed through sed; without it the
# plain text is returned for direct file rendering.
#
# @param do_escape [Boolean] escape the output for use in a sed expression
# @return [String] the (optionally escaped) setup block
def ruby_arch_setup(do_escape = false)
    Packager.info "Creating ruby env setup"

    # (makefile variable, RbConfig::CONFIG key) pairs queried via `ruby -r rbconfig`
    rbconfig_vars = [
        ["ruby_ver", "ruby_version"],
        ["ruby_arch_dir", "archdir"],
        ["ruby_libdir", "rubylibdir"]
    ]

    if do_escape
        setup = Regexp.escape("arch=$(shell gcc -print-multiarch)\n")
        # Extract the default ruby version to build for on that platform
        # this assumes a proper setup of /usr/bin/ruby
        rbconfig_vars.each do |var, key|
            # The embedded double quotes are backslash-escaped by hand; the
            # remainder of the line goes through Regexp.escape
            setup += Regexp.escape("#{var}=$(shell ruby -r rbconfig -e ") +
                     "\\\"print RbConfig::CONFIG['#{key}']\\\")" +
                     Regexp.escape("\n")
        end
        setup += Regexp.escape("rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\n")
        setup += Regexp.escape("rockruby_libdir=$(subst /usr,,$(ruby_libdir))\n")
    else
        setup = "arch=$(shell gcc -print-multiarch)\n"
        # Extract the default ruby version to build for on that platform
        # this assumes a proper setup of /usr/bin/ruby
        rbconfig_vars.each do |var, key|
            setup += "#{var}=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG['#{key}']\")\n"
        end
        setup += "rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\n"
        setup += "rockruby_libdir=$(subst /usr,,$(ruby_libdir))\n"
    end

    Packager.info "Ruby env setup is:\n#{setup}"
    setup
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shell_ruby_platform\n `ruby -rrbconfig -e \"puts RbConfig::CONFIG['sitearchdir']\"`\n end", "def archs_for_command cmd\n cmd = cmd.to_s # If we were passed a Pathname, turn it into a string.\n cmd = `/usr/bin/which #{cmd}` unless Pathname.new(cmd).absolute?\n cmd.gsub! ' ', '\\\\ ' # Escape spaces in...
[ "0.5715107", "0.5702827", "0.5696714", "0.560849", "0.533406", "0.5309851", "0.5295959", "0.5290549", "0.5278227", "0.52700776", "0.52656204", "0.5238063", "0.52332914", "0.521695", "0.5212728", "0.521216", "0.5198948", "0.5113907", "0.50973797", "0.50776345", "0.50765723", ...
0.68672913
0
Define the default compat level
# Set the debhelper compat level, defaulting to DEBHELPER_DEFAULT_COMPAT_LEVEL.
#
# Fix: replaced the shell-outs (`cat`, `echo >`) with File.read/File.write —
# the backtick variants silently swallow errors and break for paths
# containing shell metacharacters. The written content is identical to what
# `echo` produced (value plus trailing newline).
#
# @param compatlevel [Integer,String] the compat level to write
# @param compatfile [String] path to the debian compat file
def set_compat_level(compatlevel = DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile = "debian/compat")
    if File.exist?(compatfile)
        # Warn when overwriting a previously set compat level
        existing_compatlevel = File.read(compatfile).strip
        Packager.warn "Apaka::Packaging::Debian::set_compat_level: existing '#{compatfile}' with compatlevel #{existing_compatlevel}"
    end
    Packager.info "Setting debian compat level to: #{compatlevel}"
    File.write(compatfile, "#{compatlevel}\n")
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bump_level\n return :major if @options[:major]\n return :minor if @options[:minor]\n return :patch if @options[:patch]\n end", "def compat\n\t\tmodule_info['Compat'] || {}\n\tend", "def level\n [MAJOR, MINOR, PATCH, PRE].compact.join(\".\")\n end", "def compatibility_mode(compat...
[ "0.66932446", "0.6421182", "0.6386833", "0.6287057", "0.61542505", "0.6137305", "0.6110456", "0.5946597", "0.5921266", "0.58900195", "0.5840121", "0.5821903", "0.5821903", "0.58121353", "0.5804544", "0.5798878", "0.5747369", "0.5746987", "0.572297", "0.57138383", "0.5693415",...
0.7287851
0
Compute the build dependencies for a package info object. Return [Array] list of dependencies
# Compute the build dependencies for a package info object.
#
# Collects the rock package dependencies and the non-native (gem)
# dependencies that are not already provided by an ancestor release of the
# current rock release platform.
#
# Fixes: the block parameter no longer shadows the method parameter
# `pkginfo`; removed the unused local `dependencies`; the unused gem
# version block parameter is marked with an underscore.
#
# @param pkginfo package info object (responds to #dependencies)
# @return [Array<String>] list of dependency names (rock packages followed
#   by gem names)
def build_dependencies(pkginfo)
    pkgdeps = pkginfo.dependencies

    # Rock packages not already contained in an ancestor release
    deps = pkgdeps[:rock_pkginfo].select do |dep_pkginfo|
        pkg_name = debian_name(dep_pkginfo, true)
        !rock_release_platform.ancestorContains(pkg_name)
    end .map { |p| p.name }

    # Gems not already provided under any of their possible debian names
    gems = pkgdeps[:nonnative].select do |gem, _version|
        pkg_ruby_name = debian_ruby_name(gem, false)
        pkg_prefixed_name = debian_ruby_name(gem, true)

        !( rock_release_platform.ancestorContains(gem) ||
           rock_release_platform.ancestorContains(pkg_ruby_name) ||
           rock_release_platform.ancestorContains(pkg_prefixed_name))
    end .map { |p| p[0] }
    deps.concat(gems)
    deps
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_revdeps\n result = Hash.new { |h, k| h[k] = Set.new }\n each_autobuild_package do |pkg|\n pkg.dependencies.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n pkg.optional_dependencies.each do |pkg_name|\n ...
[ "0.70117927", "0.67955554", "0.67798376", "0.671425", "0.6661538", "0.66558045", "0.6655384", "0.6621452", "0.65861356", "0.6531026", "0.65242946", "0.6503561", "0.6468593", "0.64621544", "0.6460762", "0.6459045", "0.644103", "0.64374304", "0.64142406", "0.6410538", "0.637257...
0.7326735
0