query stringlengths 7 9.55k | document stringlengths 10 363k | metadata dict | negatives listlengths 0 101 | negative_scores listlengths 0 101 | document_score stringlengths 3 10 | document_rank stringclasses 102 values |
|---|---|---|---|---|---|---|
N Without this we can't easily and compactly display the file name and hash value | def to_s
return "#{name} (#{hash})"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def digest\n assert_file!\n Digest::SHA256.hexdigest(@name + Digest::SHA256.file(@path).to_s)\n end",
"def file_digest_key(stat)\n \"file_digest:#{compressed_path}:#{stat}\"\n end",
"def hash\n Digest::MD5.hexdigest(abs_filepath)[0..5]\n end",
"def hash\r\n # TODO what if fi... | [
"0.693466",
"0.6923132",
"0.685502",
"0.6758931",
"0.6752252",
"0.67483485",
"0.67191595",
"0.65860593",
"0.65592265",
"0.65478104",
"0.653117",
"0.6510235",
"0.6506937",
"0.6460275",
"0.6407951",
"0.6388847",
"0.63875854",
"0.6363617",
"0.62643",
"0.6218101",
"0.6215845",
... | 0.0 | -1 |
The relative name of this file in the content tree (relative to the base dir) N Without this we can't easily reconstruct the relative path as a single string | def relativePath
return (parentPathElements + [name]).join("/")
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def base_name\n File.basename @relative_name\n end",
"def file_name\n @file_name ||= File.basename tree\n end",
"def relative_path\n name\n end",
"def full_name\n @relative_name\n end",
"def full_name\n @relative_name\n end",
"def name\n file.partition(base).last.gsub... | [
"0.73514366",
"0.7159663",
"0.71327853",
"0.68912554",
"0.68912554",
"0.68154967",
"0.6811721",
"0.67829454",
"0.6760152",
"0.67581767",
"0.6756996",
"0.6729433",
"0.6613176",
"0.6596809",
"0.6593314",
"0.6582467",
"0.65686816",
"0.6567662",
"0.6563027",
"0.65217596",
"0.6510... | 0.6708049 | 12 |
N Without this we won't be able to initialise information about this directory based on knowing it's name and it's relative path, ready to have information about files and subdirectories added to it, and ready to be marked for deletion or copying as required. | def initialize(name = nil, parentPathElements = nil)
#N Without this we won't remember the name of the directory
@name = name
#N Without this we won't know the path elements of the sub-directory (within the directory tree) containing this directory
@pathElements = name == nil ? [] : parentPathElements + [name]
#N Without this we won't be ready to add files to the list of files in this directory
@files = []
#N Without this we won't be ready to add directories to the list of sub-directories immediately contained in this directory
@dirs = []
#N Without this we won't be ready to add files so we can look them up by name
@fileByName = {}
#N Without this we won't be ready to add immediate sub-directories so we can look them up by name
@dirByName = {}
#N Without this the directory object won't be in a default state of _not_ to be copied
@copyDestination = nil
#N Without this the directory object won't be in a default state of _not_ to be deleted
@toBeDeleted = false
#N Without this the directory object won't be in a default state of not yet having set the timestamp
@time = nil
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new_dirs; end",
"def directory(dir); end",
"def populate(level)\n\t\tinitialize\n\t\tself.name = level\n\t\tself.path = \"#{self.path}/#{level}\"\n\n\t\tDir.new(base_dir = \"#{self.path}\").each do |name|\n\t\tpath = \"#{base_dir}/#{name}\"\n\t\t\tif name !~ /^\\./\n\t\t\t\tif FileTest.directory?(path)\n\t... | [
"0.67366225",
"0.630066",
"0.62976193",
"0.6225253",
"0.6225253",
"0.6178601",
"0.6106406",
"0.6095938",
"0.59985054",
"0.5994428",
"0.59929293",
"0.5990757",
"0.5990757",
"0.5982805",
"0.5982805",
"0.5953122",
"0.59180796",
"0.5862592",
"0.58610743",
"0.58389884",
"0.5837092... | 0.7310212 | 0 |
mark this directory to be copied to a destination directory N Without this we can't mark a directory to be copied to a directory on a remote system | def markToCopy(destinationDirectory)
#N Without this it won't be marked for copying
@copyDestination = destinationDirectory
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def markToCopy(destinationDirectory)\n #N Without this we won't remember that the file is to be copied to the destination directory\n @copyDestination = destinationDirectory\n end",
"def markCopyOperations(destinationDir)\n #N Without this we can't loop over the immediate sub-directories to det... | [
"0.61730516",
"0.6078995",
"0.5986515",
"0.5841446",
"0.5800374",
"0.5770643",
"0.5669633",
"0.5629317",
"0.5507884",
"0.54967123",
"0.54917204",
"0.5449184",
"0.5449184",
"0.5390665",
"0.53606904",
"0.53490293",
"0.53425914",
"0.5295243",
"0.5292298",
"0.5289558",
"0.5272424... | 0.6880052 | 0 |
mark this directory (on a remote system) to be deleted N Without this we can't mark a directory (on a remote system) to be deleted | def markToDelete
#N Without this it won't be marked for deletion
@toBeDeleted = true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def markToDelete\n #N Without this we won't remember that this file is to be deleted\n @toBeDeleted = true\n end",
"def delete_from_disk; end",
"def delete\n if processable? or unset?\n dir.delete\n else\n raise JobError.cannot_delete(@id, state)\n end\n ... | [
"0.6357208",
"0.6278554",
"0.6038147",
"0.5936869",
"0.5936011",
"0.5749203",
"0.57407403",
"0.57291937",
"0.5711708",
"0.57000226",
"0.5671138",
"0.56697077",
"0.5645765",
"0.56383324",
"0.5634468",
"0.563188",
"0.5628065",
"0.5579145",
"0.5565366",
"0.5562144",
"0.5549265",... | 0.63815486 | 0 |
the path of the directory that this content tree represents, relative to the base directory N Without this we can't know the relative path of the subdirectory within the content tree | def relativePath
#N Without this the path elements won't be joined together with "/" to get the relative path as a single string
return @pathElements.join("/")
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def relative_directory\n return '' unless @directory_root\n @path - @directory_root - name\n end",
"def containing_directory\n path.dirname\n end",
"def dir\n File.dirname(self.path)\n end",
"def directory\n self.path.directory\n end",
"def directory\n File.dirname... | [
"0.7396666",
"0.7095777",
"0.6993002",
"0.69723535",
"0.6970938",
"0.69383276",
"0.68717897",
"0.68664604",
"0.6858817",
"0.6816477",
"0.68033266",
"0.68033266",
"0.67954874",
"0.6773204",
"0.67649233",
"0.674438",
"0.67174965",
"0.6704844",
"0.6696508",
"0.66949326",
"0.6690... | 0.6221629 | 78 |
convert a path string to an array of path elements (or return it as is if it's already an array) N Without this we can't start from a path and decompose it into elements (optionally allowing for the case where the conversion has already been done) | def getPathElements(path)
#N Without this path as a single string won't be decomposed into a list of elements
return path.is_a?(String) ? (path == "" ? [] : path.split("/")) : path
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def split_path(str)\n return str.map(&:to_s) if str.is_a?(::Array)\n @delimiter_handler.split_path(str.to_s)\n end",
"def path_comps path\n path.nil? || path.empty? ? [] : path[1..(path[-1] == \"/\" ? -2 : -1)].split('/')\n end",
"def split_path(path_string)\n path_string.split('->').ma... | [
"0.66763914",
"0.6372118",
"0.6334941",
"0.6334941",
"0.6324023",
"0.61564857",
"0.6069321",
"0.6066315",
"0.59618604",
"0.5960444",
"0.59602547",
"0.59581316",
"0.59389573",
"0.5933138",
"0.591935",
"0.5890538",
"0.5838689",
"0.579191",
"0.5791829",
"0.5783735",
"0.5760701",... | 0.7467731 | 0 |
get the content tree for a subdirectory (creating it if it doesn't yet exist) N Without this we can't create the content tree for an immediate subdirectory of the directory represented by this content tree (which means we can't recursively create the full content tree for this directory) | def getContentTreeForSubDir(subDir)
#N Without this we won't know if the relevant sub-directory content tree hasn't already been created
dirContentTree = dirByName.fetch(subDir, nil)
#N Without this check, we'll be recreated the sub-directory content tree, even if we know it has already been created
if dirContentTree == nil
#N Without this the new sub-directory content tree won't be created
dirContentTree = ContentTree.new(subDir, @pathElements)
#N Without this the new sub-directory won't be added to the list of sub-directories of this directory
dirs << dirContentTree
#N Without this we won't be able to find the sub-directory content tree by name
dirByName[subDir] = dirContentTree
end
return dirContentTree
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getContentTree(baseDir)\n #N Without this, wouldn't have an empty content tree that we could start filling with dir & file data\n contentTree = ContentTree.new()\n #N Without this, wouldn't record the time of the content tree, and wouldn't be able to determine from a file's modification time tha... | [
"0.62971216",
"0.62222576",
"0.6052521",
"0.5828611",
"0.58244056",
"0.5779535",
"0.5749211",
"0.57088774",
"0.5687567",
"0.5676394",
"0.56480145",
"0.5604444",
"0.5598129",
"0.5568739",
"0.55377835",
"0.55223143",
"0.5521632",
"0.55180186",
"0.5499369",
"0.5484999",
"0.54515... | 0.7456548 | 0 |
add a subdirectory to this content tree Without this we won't be able to add a subdirectory (given as a path with possibly more than one element) into the content tree | def addDir(dirPath)
#N Without this, the directory path won't be broken up into its elements
pathElements = getPathElements(dirPath)
#N Without this check, it will fail in the case where dirPath has no elements in it
if pathElements.length > 0
#N Without this, we won't know the first element in the path (which is needed to construct the immediate sub-directory content-tree representing the first part of the path)
pathStart = pathElements[0]
#N Without this we won't know the rest of the elements so that we can add that part of the dir path into the content tree we've just created
restOfPath = pathElements[1..-1]
#N Without this the immedate sub-directory content tree and the chain of sub-directories within that won't be created
getContentTreeForSubDir(pathStart).addDir(restOfPath)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add(path)\n chdir { super }\n end",
"def add(path); end",
"def add_folder name\n elem = ElemFolder.new(name)\n add_element(elem)\n end",
"def add_directory(directory)\n @directories[directory.name] = directory\n directory.parent = self\n end",
"def add_dir(path)\n Dir... | [
"0.63590306",
"0.62446785",
"0.62136775",
"0.62088394",
"0.61840373",
"0.6160388",
"0.6123743",
"0.61169434",
"0.60914564",
"0.60903114",
"0.6052374",
"0.60452986",
"0.6027106",
"0.60125977",
"0.594964",
"0.5932872",
"0.5915284",
"0.5890606",
"0.5866839",
"0.58628446",
"0.581... | 0.6739095 | 0 |
recursively sort the files and subdirectories of this content tree alphabetically N Without this, we will have to put up with subdirectories and filedirectories being listed in whatever order the listing commands happen to list them in, which may not be consisted across different copies of effectively the same content tree on different systems. | def sort!
#N Without this, the immediate sub-directories won't get sorted
dirs.sort_by! {|dir| dir.name}
#N Without this, files contained immediately in this directory won't get sorted
files.sort_by! {|file| file.name}
#N Without this, files and directories contained within sub-directories of this directory won't get sorted
for dir in dirs
#N Without this, this sub-directory won't have its contents sorted
dir.sort!
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sort_files!\n site.collections.each_value { |c| c.docs.sort! }\n site.pages.sort_by!(&:name)\n site.static_files.sort_by!(&:relative_path)\n end",
"def sort_children\n @children.sort! do |a, b|\n if (a.leaf? && b.leaf?) || (!a.leaf? && !b.leaf?)\n a.name <=> b.name\n els... | [
"0.6565236",
"0.6553888",
"0.6486574",
"0.6473949",
"0.6463968",
"0.64280075",
"0.63442886",
"0.631743",
"0.6286578",
"0.62236816",
"0.61583024",
"0.6137821",
"0.6137821",
"0.60671806",
"0.60644627",
"0.60512304",
"0.6046765",
"0.60404986",
"0.602944",
"0.6025323",
"0.6001600... | 0.7741237 | 0 |
given a relative path, add a file and hash value to this content tree N Without this, we can't add a file description (given as a relative path and a hash value) into the content tree for this directory | def addFile(filePath, hash)
#N Without this the path won't be broken up into elements so that we can start by processing the first element
pathElements = getPathElements(filePath)
#N Without this check, we would attempt to process an invalid path consisting of an empty string or no path elements (since the path should always contain at least one element consisting of the file name)
if pathElements.length == 0
#N Without this, the case of zero path elements will not be treated as an error
raise "Invalid file path: #{filePath.inspect}"
end
#N Without this check, the cases of having the immediate file name (to be added as a file in this directory) and having a file within a sub-directory will not be distinguished
if pathElements.length == 1
#N Without this, the single path element will not be treated as being the immediate file name
fileName = pathElements[0]
#N Without this, we won't have our object representing the file name and a hash of its contents
fileContent = FileContent.new(fileName, hash, @pathElements)
#N Without this, the file&content object won't be added to the list of files contained in this directory
files << fileContent
#N Without this, we won't be able to look up the file&content object by name.
fileByName[fileName] = fileContent
else
#N Without this, we won't have the first part of the file path required to identify the immediate sub-directory that it is found in.
pathStart = pathElements[0]
#N Without this, we won't have the rest of the path which needs to be passed to the content tree in the immediate sub-directory
restOfPath = pathElements[1..-1]
#N Without this, the file & hash won't be added into the sub-directory's content tree
getContentTreeForSubDir(pathStart).addFile(restOfPath, hash)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add(path)\n blob = GitRb::Blob.new(path)\n FileUtils.mkdir_p(blob.directory)\n File.open(blob.object_path, 'w') { |f| f.write(blob.deflated_content) }\n GitRb::Index.new.add(blob)\n end",
"def add_file absolute_name, relative_name: absolute_name, parser: nil\n unless top_l... | [
"0.59910583",
"0.59862834",
"0.5984579",
"0.5936294",
"0.5856249",
"0.580495",
"0.56869835",
"0.5684682",
"0.56463146",
"0.56170005",
"0.55952966",
"0.55853224",
"0.55565417",
"0.5550237",
"0.55163854",
"0.55110806",
"0.5472231",
"0.5436014",
"0.54193217",
"0.5386023",
"0.536... | 0.6424309 | 0 |
prettyprint this content tree N Without this, we won't have a way to output a nice easytoread description of this content tree object | def showIndented(name = "", indent = " ", currentIndent = "")
#N Without this check, would attempt to output time for directories other than the root directory for which time has not been recorded
if time != nil
#N Without this, any recorded time value wouldn't be output
puts "#{currentIndent}[TIME: #{time.strftime(@@dateTimeFormat)}]"
end
#N Without this check, an empty line would be output for root level (which has no name within the content tree)
if name != ""
#N Without this,non-root sub-directories would not be displayed
puts "#{currentIndent}#{name}"
end
#N Without this check, directories not to be copied would be shown as to be copied
if copyDestination != nil
#N Without this, directories marked to be copied would not be displayed as such
puts "#{currentIndent} [COPY to #{copyDestination.relativePath}]"
end
#N Without this check, directories not be to deleted would be shown as to be deleted
if toBeDeleted
#N Without this, directories marked to be deleted would not be displayed as such
puts "#{currentIndent} [DELETE]"
end
#N Without this, output for sub-directories and files would not be indented further than their parent
nextIndent = currentIndent + indent
#N Without this, sub-directories of this directory won't be included in the output
for dir in dirs
#N Without this, this sub-directory won't be included in the output (suitable indented relative to the parent)
dir.showIndented("#{dir.name}/", indent = indent, currentIndent = nextIndent)
end
#N Without this, files contained immediately in this directory won't be included in the output
for file in files
#N Without this, this file and the hash of its contents won't be shown in the output
puts "#{nextIndent}#{file.name} - #{file.hash}"
#N Without this check, files not to be copied would be shown as to be copied
if file.copyDestination != nil
#N Without this, files marked to be copied would not be displayed as such
puts "#{nextIndent} [COPY to #{file.copyDestination.relativePath}]"
end
#N Without this check, files not to be deleted would be shown as to be deleted
if file.toBeDeleted
#N Without this, files marked to be deleted would not be displayed as such
puts "#{nextIndent} [DELETE]"
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_s\n object_identifier = \"#<#{self.class.to_s}:0x#{'%x' % (self.object_id << 1)}\\n\"\n close_object = \">\\n\"\n \n case self\n when RubyBBCode::BBTree\n object_identifier + \"Children: #{count_child_nodes}\\n\" + self.to_v + close_object\n when RubyBBCode::TagNode # ... | [
"0.68639475",
"0.6719232",
"0.6704638",
"0.66586185",
"0.6626054",
"0.6588514",
"0.6568514",
"0.6499684",
"0.6474251",
"0.6451705",
"0.6420791",
"0.63706154",
"0.6364939",
"0.63569766",
"0.6339204",
"0.6323295",
"0.6301825",
"0.6301825",
"0.62963414",
"0.62963045",
"0.6294872... | 0.0 | -1 |
write this content tree to an open file, indented N Without this, the details for the content tree could not be output to a file in a format that could be read in again (by readFromFile) | def writeLinesToFile(outFile, prefix = "")
#N Without this check, it would attempt to write out a time value when none was available
if time != nil
#N Without this, a line for the time value would not be written to the file
outFile.puts("T #{time.strftime(@@dateTimeFormat)}\n")
end
#N Without this, directory information would not be written to the file (for immediate sub-directories)
for dir in dirs
#N Without this, a line for this sub-directory would not be written to the file
outFile.puts("D #{prefix}#{dir.name}\n")
#N Without this, lines for the sub-directories and files contained with this directory would not be written to the file
dir.writeLinesToFile(outFile, "#{prefix}#{dir.name}/")
end
#N Without this, information for files directly contained within this directory would not be written to the file
for file in files
#N Without this, the line for this file would not be written to the file
outFile.puts("F #{file.hash} #{prefix}#{file.name}\n")
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def writeToFile(fileName)\n #N Without this, the user would not have feedback that the content tree is being written to the named file\n puts \"Writing content tree to file #{fileName} ...\"\n #N Without this, the named file cannot be written to\n File.open(fileName, \"w\") do |outFile|\n ... | [
"0.6924604",
"0.68879145",
"0.6607039",
"0.6411231",
"0.6252944",
"0.6021157",
"0.6019684",
"0.60032004",
"0.59965795",
"0.5967358",
"0.5846589",
"0.57522166",
"0.57512265",
"0.57368577",
"0.5684459",
"0.5684459",
"0.56177527",
"0.560796",
"0.56032205",
"0.55842906",
"0.55652... | 0.51267797 | 84 |
write this content tree to a file (in a format which readFromFile can read back in) N Without this, information for a content tree could not be output to a named file in a format that could be read in again (by readFromFile) | def writeToFile(fileName)
#N Without this, the user would not have feedback that the content tree is being written to the named file
puts "Writing content tree to file #{fileName} ..."
#N Without this, the named file cannot be written to
File.open(fileName, "w") do |outFile|
#N Without this, the lines of information for the content tree will not be written to the open file
writeLinesToFile(outFile)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def writeTree(file)\n file.write toString\n end",
"def writeTree(file)\n file.write @tree.toString\n end",
"def save args={}\n raise ArgumentError, \"No file name provided\" if args[:filename].nil?\n @savable_sgf = \"(\"\n @root.children.each { |child| write_node child }\n @savable_... | [
"0.708938",
"0.6944336",
"0.64942193",
"0.63798296",
"0.6378713",
"0.6258802",
"0.60908926",
"0.6045727",
"0.5970564",
"0.5955621",
"0.59488744",
"0.59364456",
"0.58001804",
"0.57799035",
"0.57076114",
"0.5681749",
"0.565616",
"0.56336623",
"0.56313986",
"0.5617701",
"0.56037... | 0.7386327 | 0 |
Mark operations for this (source) content tree and the destination content tree in order to synch the destination content tree with this one | def markSyncOperationsForDestination(destination)
markCopyOperations(destination)
destination.markDeleteOptions(self)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def markSyncOperations\n #N Without this, the sync operations won't be marked\n @sourceContent.markSyncOperationsForDestination(@destinationContent)\n #N Without these puts statements, the user won't receive feedback about what sync operations (i.e. copies and deletes) are marked for execution\n ... | [
"0.70432514",
"0.5568062",
"0.5516127",
"0.5390006",
"0.53412265",
"0.5340595",
"0.53319335",
"0.5282468",
"0.52564025",
"0.5224698",
"0.5191247",
"0.5163606",
"0.5136976",
"0.5121602",
"0.51038164",
"0.5047916",
"0.50231063",
"0.5011603",
"0.50046",
"0.49930847",
"0.49810964... | 0.62798727 | 1 |
Get the named subdirectory content tree, if it exists N Without this we wouln't have an easy way to get an immediate subdirectory by name, but returning nil for one that doesn't exist (in the case where the name is from a different directory, and that directory doesn't exist in this one) | def getDir(dir)
return dirByName.fetch(dir, nil)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _dir(name, obj = @obj)\n obj[\"dirs\"].reverse.each do |dir|\n return dir[name] if dir[name]\n end\n return nil\n end",
"def find_directory(name, current = Pathname.new('.'))\n raise \"Cannot find directory #{name}\" if current.expand_path.root?\n path = current + name\n ... | [
"0.6202693",
"0.5947798",
"0.5773851",
"0.5719538",
"0.5663004",
"0.5662706",
"0.5654844",
"0.5619072",
"0.55437714",
"0.5455498",
"0.54215175",
"0.5388436",
"0.53773755",
"0.5345005",
"0.5333558",
"0.5329629",
"0.53231543",
"0.5316825",
"0.5307021",
"0.52685565",
"0.5262291"... | 0.50346136 | 48 |
Get the named file & hash value, if it exists N Without this we wouln't have an easy way to get an immediate file & hash by name, but returning nil for one that doesn't exist (in the case where the name is from a different directory, and that file doesn't exist in this one) | def getFile(file)
return fileByName.fetch(file, nil)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_file_named name\n @files_hash[name]\n end",
"def get_value(path, hash)\n path.split('.').each do |key|\n hash = hash[key]\n end\n raise if hash.nil?\n\n hash\n rescue\n \"ValueNotFound\"\n end",
"def try_file(filename)\n return file(filename)\n rescue FileMissing\n ... | [
"0.69655967",
"0.6405222",
"0.59801364",
"0.59801364",
"0.5943641",
"0.5928381",
"0.5913333",
"0.59020627",
"0.58822113",
"0.58430594",
"0.57913065",
"0.57733846",
"0.57731915",
"0.57544047",
"0.57515925",
"0.56894124",
"0.5687003",
"0.5682225",
"0.5659012",
"0.5657253",
"0.5... | 0.62720704 | 2 |
Mark copy operations, given that the corresponding destination directory already exists. For files and directories that don't exist in the destination, mark them to be copied. For subdirectories that do exist, recursively mark the corresponding subdirectory copy operations. N Without this we won't know how to mark which subdirectories and files in this (source) directory need to by marked for copying into the other directory, because they don't exist in the other (destination) directory | def markCopyOperations(destinationDir)
#N Without this we can't loop over the immediate sub-directories to determine how each one needs to be marked for copying
for dir in dirs
#N Without this we won't have the corresponding sub-directory in the other directory with the same name as this sub-directory (if it exists)
destinationSubDir = destinationDir.getDir(dir.name)
#N Without this check, we won't be able to correctly process a sub-directory based on whether or not one with the same name exists in the other directory
if destinationSubDir != nil
#N Without this, files and directories missing or changed from the other sub-directory (which does exist) won't get copied
dir.markCopyOperations(destinationSubDir)
else
#N Without this, the corresponding missing sub-directory in the other directory won't get updated from this sub-directory
dir.markToCopy(destinationDir)
end
end
#N Without this we can't loop over the files to determine how each one needs to be marked for copying
for file in files
#N Without this we won't have the corresponding file in the other directory with the same name as this file (if it exists)
destinationFile = destinationDir.getFile(file.name)
#N Without this check, this file will get copied, even if it doesn't need to be (it only needs to be if it is missing, or the hash is different)
if destinationFile == nil or destinationFile.hash != file.hash
#N Without this, a file that is missing or changed won't get copied (even though it needs to be)
file.markToCopy(destinationDir)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doCopyOperations(sourceContent, destinationContent, dryRun)\n #N Without this loop, we won't copy the directories that are marked for copying\n for dir in sourceContent.dirs\n #N Without this check, we would attempt to copy those directories _not_ marked for copying (but which might still have... | [
"0.7002961",
"0.6833688",
"0.6571331",
"0.6571331",
"0.640944",
"0.63271123",
"0.63205224",
"0.63011503",
"0.6075234",
"0.60031825",
"0.59765774",
"0.5961834",
"0.58510214",
"0.58310795",
"0.58217263",
"0.5815152",
"0.5803611",
"0.5789002",
"0.5783199",
"0.5774021",
"0.577301... | 0.82695365 | 0 |
Mark delete operations, given that the corresponding source directory exists. For files and directories that don't exist in the source, mark them to be deleted. For subdirectories that do exist, recursively mark the corresponding subdirectory delete operations. N Without this we won't know how to mark which subdirectories and files in this (destination) directory need to by marked for deleting (because they don't exist in the other source directory) | def markDeleteOptions(sourceDir)
#N Without this we can't loop over the immediate sub-directories to determine how each one needs to be marked for deleting
for dir in dirs
#N Without this we won't have the corresponding sub-directory in the other directory with the same name as this sub-directory (if it exists)
sourceSubDir = sourceDir.getDir(dir.name)
#N Without this check, we won't be able to correctly process a sub-directory based on whether or not one with the same name exists in the other directory
if sourceSubDir == nil
#N Without this, this directory won't be deleted, even though it doesn't exist at all in the corresponding source directory
dir.markToDelete()
else
#N Without this, files and directories missing from the other source sub-directory (which does exist) won't get deleted
dir.markDeleteOptions(sourceSubDir)
end
end
#N Without this we can't loop over the files to determine which ones need to be marked for deleting
for file in files
#N Without this we won't known if the corresponding file in the source directory with the same name as this file exists
sourceFile = sourceDir.getFile(file.name)
#N Without this check, we will incorrectly delete this file whether or not it exists in the source directory
if sourceFile == nil
#N Without this, this file which doesn't exist in the source directory won't get deleted from this directory
file.markToDelete()
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doDeleteOperations(destinationContent, dryRun)\n #N Without this loop, we won't delete all sub-directories or files and directories within sub-directories which have been marked for deletion\n for dir in destinationContent.dirs\n #N Without this check, we would delete directories which have no... | [
"0.6534344",
"0.60478157",
"0.5677536",
"0.5677536",
"0.55911976",
"0.5584727",
"0.5559137",
"0.5556352",
"0.5528383",
"0.5495735",
"0.54590863",
"0.5455199",
"0.544853",
"0.5363932",
"0.5359782",
"0.53061295",
"0.5294999",
"0.5294999",
"0.5244365",
"0.52304226",
"0.52049124"... | 0.7721328 | 0 |
N Without this constructor, there is no way to construct a content location object with readonly cached content file attribute | def initialize(cachedContentFile)
#N Without this the name of the cached content file won't be remembered
@cachedContentFile = cachedContentFile
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(baseDirectory, hashClass, cachedContentFile = nil)\n #N Without this, we won't remember the cached content file name\n super(cachedContentFile)\n #N Without this, we won't remember the base directory\n @baseDirectory = baseDirectory\n #N Without this, we won't remember the h... | [
"0.642407",
"0.63396466",
"0.63380396",
"0.62640715",
"0.6249576",
"0.6183083",
"0.6082768",
"0.60645586",
"0.6049961",
"0.6023843",
"0.60098326",
"0.5988599",
"0.5935178",
"0.59269065",
"0.59102565",
"0.5870863",
"0.5849602",
"0.58016145",
"0.58016145",
"0.57950556",
"0.5771... | 0.7335866 | 0 |
Get the cached content file name, if specified, and if the file exists N Without this there is no easy way to get the existing cached content tree (if the cached content file is specified, and if the file exists) | def getExistingCachedContentTreeFile
#N Without this check, it would try to find the cached content file when none was specified
if cachedContentFile == nil
#N Without this, there will be no feedback to the user that no cached content file is specified
puts "No cached content file specified for location"
return nil
#N Without this check, it will try to open the cached content file when it doesn't exist (i.e. because it hasn't been created, or, it has been deleted)
elsif File.exists?(cachedContentFile)
#N Without this, it won't return the cached content file when it does exist
return cachedContentFile
else
#N Without this, there won't be feedback to the user that the specified cached content file doesn't exist.
puts "Cached content file #{cachedContentFile} does not yet exist."
return nil
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getCachedContentTree\n #N Without this, we won't know the name of the specified cached content file (if it is specified)\n file = getExistingCachedContentTreeFile\n #N Without this check, we would attempt to read a non-existent file\n if file\n #N Without this, a content tree that ha... | [
"0.7655323",
"0.66739684",
"0.6588756",
"0.64611346",
"0.6433945",
"0.637749",
"0.6322822",
"0.6317328",
"0.6283636",
"0.6129526",
"0.6094344",
"0.6058541",
"0.6056393",
"0.6007544",
"0.5934763",
"0.59108925",
"0.5858681",
"0.5827437",
"0.5817149",
"0.5802685",
"0.5795141",
... | 0.79947555 | 0 |
Delete any existing cached content file N Without this, there won't be an easy way to delete the cached content file (if it is specified and it exists) | def clearCachedContentFile
#N Without this check, it will try to delete a cached content file even when it doesn't exist
if cachedContentFile and File.exists?(cachedContentFile)
#N Without this, there will be no feedback to the user that the specified cached content file is being deleted
puts " deleting cached content file #{cachedContentFile} ..."
#N Without this, the specified cached content file won't be deleted
File.delete(cachedContentFile)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_cache_files; end",
"def delete_cache_files; end",
"def delete(key)\n File.unlink cache_path(key)\n rescue Errno::ENOENT\n end",
"def delete key\n File.delete( cache_file(key) ) if File.exists?( cache_file(key) )\n end",
"def delete_cached_file(no_raise: false)\n __debug_items(b... | [
"0.7299269",
"0.7299269",
"0.7096548",
"0.70510894",
"0.66761404",
"0.66008186",
"0.64736533",
"0.6442716",
"0.6430355",
"0.64131874",
"0.6367565",
"0.6352273",
"0.63115203",
"0.62943643",
"0.6253384",
"0.61624074",
"0.6156015",
"0.6105692",
"0.6085801",
"0.6084458",
"0.60583... | 0.73291236 | 0 |
Get the cached content tree (if any), read from the specified cached content file. N Without this method, there won't be an easy way to get the cached content from the cached content file (if the file is specified, and if it exists) | def getCachedContentTree
#N Without this, we won't know the name of the specified cached content file (if it is specified)
file = getExistingCachedContentTreeFile
#N Without this check, we would attempt to read a non-existent file
if file
#N Without this, a content tree that has been cached won't be returned.
return ContentTree.readFromFile(file)
else
return nil
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getContentTree\n #N Without this check we would try to read the cached content file when there isn't one, or alternatively, we would retrieve the content details remotely, when we could have read them for a cached content file\n if cachedContentFile and File.exists?(cachedContentFile)\n #N Wit... | [
"0.74917275",
"0.7052276",
"0.704695",
"0.7043376",
"0.68831205",
"0.67872566",
"0.678021",
"0.6449568",
"0.64164776",
"0.6360816",
"0.6349079",
"0.6342801",
"0.6331629",
"0.63180935",
"0.630582",
"0.6261884",
"0.62607056",
"0.6240054",
"0.6202332",
"0.61656773",
"0.61142474"... | 0.8271353 | 0 |
Read a map of file hashes (mapping from relative file name to hash value) from the specified cached content file N Without this, there won't be an easy way to get a map of file hashes (keyed by relative file name), for the purpose of getting the hashes of existing files which are known not to have changed (by comparing modification time to timestamp, which is also returned) | def getCachedContentTreeMapOfHashes
#N Without this, we won't know the name of the specified cached content file (if it is specified)
file = getExistingCachedContentTreeFile
#N Without this check, we would attempt to read a non-existent file
if file
#N Without this, there won't be feedback to the user that we are reading the cached file hashes
puts "Reading cached file hashes from #{file} ..."
#N Without this, a map of cached file hashes won't be returned
return ContentTree.readMapOfHashesFromFile(file)
else
#N Without this, the method wouldn't consistently return an array of timestamp + map of hashes in the case where there is no cached content file
return [nil, {}]
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def buildCodeFilesHashFromFiles()\n\t\tdir = @cacheDirPath \n\t\tfilesList = Dir.glob(dir + \"**/*\").select{|e| File.file? e}\n\t\tfilesList.map.with_index{|file,index|\n\t\t\t#p \"cacheFile: \" + index.to_s if index % 1000 == 0\n\t\t\tp \"cacheFile: \" + index.to_s \n\t\t\tfilePath = dir + index.to_s + \".yaml\"... | [
"0.65218055",
"0.64184135",
"0.611738",
"0.60881734",
"0.6085953",
"0.60673094",
"0.5851411",
"0.57640356",
"0.57250535",
"0.5724361",
"0.56876105",
"0.56682676",
"0.562255",
"0.5619828",
"0.55920434",
"0.55652946",
"0.55644053",
"0.55460685",
"0.5538999",
"0.5534997",
"0.544... | 0.69449836 | 0 |
N Without this, we won't be able to construct an object representing a local content location, with readonly attributes specifying the directory, the hash function, and, optionally, the name of the cached content file. | def initialize(baseDirectory, hashClass, cachedContentFile = nil)
#N Without this, we won't remember the cached content file name
super(cachedContentFile)
#N Without this, we won't remember the base directory
@baseDirectory = baseDirectory
#N Without this, we won't remember the hash function
@hashClass = hashClass
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(cachedContentFile)\n #N Without this the name of the cached content file won't be remembered\n @cachedContentFile = cachedContentFile\n end",
"def initialize(cache_dir, data)\n @cache_dir = Path.new(cache_dir)\n @data = data\n end",
"def initialize(contentHost, baseDir,... | [
"0.6595214",
"0.6238965",
"0.617047",
"0.61021125",
"0.60642135",
"0.60487205",
"0.60059464",
"0.59772754",
"0.597332",
"0.5951963",
"0.5951963",
"0.5827734",
"0.58217674",
"0.5810983",
"0.5810289",
"0.57476544",
"0.5727007",
"0.57262546",
"0.56889504",
"0.56822413",
"0.56759... | 0.65742356 | 1 |
get the full path of a relative path (i.e. of a file/directory within the base directory) N Without this, we won't have an easy way to calculate the full path of a file or directory in the content tree that is specified by its relative path. | def getFullPath(relativePath)
return @baseDirectory.fullPath + relativePath
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def relativePath\n #N Without this the path elements won't be joined together with \"/\" to get the relative path as a single string\n return @pathElements.join(\"/\")\n end",
"def relative_path\n must_be File\n Pathname.new(self.full_path).relative_path_from(Pathname.new(Dir.pwd)).to_s\n end... | [
"0.7824317",
"0.7762498",
"0.7634046",
"0.75506514",
"0.7499013",
"0.7491094",
"0.7317677",
"0.7282227",
"0.72361356",
"0.7230335",
"0.7229075",
"0.72046477",
"0.71950006",
"0.7169684",
"0.71660477",
"0.71440613",
"0.7143346",
"0.70555377",
"0.70435154",
"0.700468",
"0.698823... | 0.7330189 | 6 |
get the content tree for this base directory by iterating over all subdirectories and files within the base directory (and excluding the excluded files) and calculating file hashes using the specified Ruby hash class If there is an existing cached content file, use that to get the hash values of files whose modification time is earlier than the time value for the cached content tree. Also, if a cached content file is specified, write the final content tree back out to the cached content file. N Without this we won't have way to get the content tree object describing the contents of the local directory | def getContentTree
#N Without this we won't have timestamp and the map of file hashes used to efficiently determine the hash of a file which hasn't been modified after the timestamp
cachedTimeAndMapOfHashes = getCachedContentTreeMapOfHashes
#N Without this we won't have the timestamp to compare against file modification times
cachedTime = cachedTimeAndMapOfHashes[0]
#N Without this we won't have the map of file hashes
cachedMapOfHashes = cachedTimeAndMapOfHashes[1]
#N Without this we won't have an empty content tree which can be populated with data describing the files and directories within the base directory
contentTree = ContentTree.new()
#N Without this we won't have a record of a time which precedes the recording of directories, files and hashes (which can be used when this content tree is used as a cached for data when constructing some future content tree)
contentTree.time = Time.now.utc
#N Without this, we won't record information about all sub-directories within this content tree
for subDir in @baseDirectory.subDirs
#N Without this, this sub-directory won't be recorded in the content tree
contentTree.addDir(subDir.relativePath)
end
#N Without this, we won't record information about the names and contents of all files within this content tree
for file in @baseDirectory.allFiles
#N Without this, we won't know the digest of this file (if we happen to have it) from the cached content tree
cachedDigest = cachedMapOfHashes[file.relativePath]
#N Without this check, we would assume that the cached digest applies to the current file, even if one wasn't available, or if the file has been modified since the time when the cached value was determined.
# (Extra note: just checking the file's mtime is not a perfect check, because a file can "change" when actually it or one of it's enclosing sub-directories has been renamed, which might not reset the mtime value for the file itself.)
if cachedTime and cachedDigest and File.stat(file.fullPath).mtime < cachedTime
#N Without this, the digest won't be recorded from the cached digest in those cases where we know the file hasn't changed
digest = cachedDigest
else
#N Without this, a new digest won't be determined from the calculated hash of the file's actual contents
digest = hashClass.file(file.fullPath).hexdigest
end
#N Without this, information about this file won't be added to the content tree
contentTree.addFile(file.relativePath, digest)
end
#N Without this, the files and directories in the content tree might be listed in some indeterminate order
contentTree.sort!
#N Without this check, a new version of the cached content file will attempt to be written, even when no name has been specified for the cached content file
if cachedContentFile != nil
#N Without this, a new version of the cached content file (ready to be used next time) won't be created
contentTree.writeToFile(cachedContentFile)
end
return contentTree
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getContentTree\n #N Without this check we would try to read the cached content file when there isn't one, or alternatively, we would retrieve the content details remotely, when we could have read them for a cached content file\n if cachedContentFile and File.exists?(cachedContentFile)\n #N Wit... | [
"0.6532654",
"0.62289214",
"0.6088277",
"0.58565265",
"0.5744223",
"0.5535684",
"0.5490923",
"0.5486155",
"0.5483177",
"0.545822",
"0.5385174",
"0.5376981",
"0.53395647",
"0.5328105",
"0.5323563",
"0.529577",
"0.5282739",
"0.5270191",
"0.5257498",
"0.5239051",
"0.52177536",
... | 0.7902518 | 0 |
N Without this we wouldn't be able to create the remote content location object with readonly attributes | def initialize(contentHost, baseDir, cachedContentFile = nil)
# Without super, we won't remember the cached content file (if specified)
super(cachedContentFile)
# Without this we won't remember which remote server to connect to
@contentHost = contentHost
# Without this we won't remember which directoy on the remote server to sync to.
@baseDir = normalisedDir(baseDir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def location= attrs\n self.location_attributes= attrs\n end",
"def store_location!; end",
"def full_location; end",
"def set_object_origin(content, value)\n content.ox = value\n end",
"def set_object_origin(content, value)\n content.oy = value\n end",
"def location\n ... | [
"0.58561534",
"0.5666641",
"0.55912244",
"0.5535023",
"0.5524067",
"0.5522559",
"0.54908633",
"0.5467026",
"0.54587656",
"0.54461074",
"0.54368997",
"0.5426398",
"0.5370556",
"0.5369927",
"0.5359258",
"0.5343393",
"0.5340842",
"0.53316486",
"0.5327592",
"0.5317931",
"0.531793... | 0.0 | -1 |
N Without this we won't have any way to close cached open connections (and they will leak) | def closeConnections
#N Without this the cached connections won't get closed
@contentHost.closeConnections()
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def close\n # by default do nothing - close any cached connections\n end",
"def close_expired_connections\n raise NotImplementedError\n end",
"def connection_pool_maximum_reuse\n super\n end",
"def empty_connection_pools!; end",
"def close_connection; end",
"def connection_closed\... | [
"0.75031",
"0.7004418",
"0.6814353",
"0.6683453",
"0.6613164",
"0.655596",
"0.6445006",
"0.6427194",
"0.6392217",
"0.63802546",
"0.6344764",
"0.633376",
"0.6324023",
"0.6317249",
"0.63168055",
"0.630244",
"0.628612",
"0.6278678",
"0.6278678",
"0.6272998",
"0.62502193",
"0.6... | 0.744485 | 1 |
list files within the base directory on the remote contentHost N Without this we won't have an easy way to list all files in the remote directory on the remote system | def listFiles()
#N Without this the files won't get listed
contentHost.listFiles(baseDir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def listFiles(baseDir)\n #N Without this, the base directory might be missing the final '/', which might cause a one-off error when 'subtracting' the base directory name from the absolute paths to get relative paths\n baseDir = normalisedDir(baseDir)\n #N Without this we wouldn't be executing the co... | [
"0.75602233",
"0.7403312",
"0.7333173",
"0.7320576",
"0.70485646",
"0.70335466",
"0.6907107",
"0.6856096",
"0.6570057",
"0.6569688",
"0.6500472",
"0.6461954",
"0.6431847",
"0.6402489",
"0.63706017",
"0.63605845",
"0.6333622",
"0.63274324",
"0.6290226",
"0.62764335",
"0.626983... | 0.8051429 | 0 |
object required to execute SCP (e.g. "scp" or "pscp", possibly with extra args) N Without this we won't have a handle on the object used to perform SSH/SCP actions | def sshAndScp
return contentHost.sshAndScp
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def scp options = {}\n self.__commands << options.merge({:type => :scp})\n end",
"def initialize(shell, scpProgram)\n #N Without this we won't have the remote shell command as an array of executable + arguments\n @shell = shell.is_a?(String) ? [shell] : shell\n #N Without this we won't have th... | [
"0.6245335",
"0.60075974",
"0.5718014",
"0.54305464",
"0.5376445",
"0.5346248",
"0.5294634",
"0.52113396",
"0.5139295",
"0.5126871",
"0.5095302",
"0.5030562",
"0.49899578",
"0.4964725",
"0.49260777",
"0.49208206",
"0.48916313",
"0.48773062",
"0.48633236",
"0.4855343",
"0.4850... | 0.46920723 | 30 |
get the full path of a relative path N Without this we won't have an easy way to get the full path of a file or directory specified relative the remote directory | def getFullPath(relativePath)
return baseDir + relativePath
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def relativePath\n #N Without this the path elements won't be joined together with \"/\" to get the relative path as a single string\n return @pathElements.join(\"/\")\n end",
"def full_path_to_remote_dir\n (remote_dir[0] == ?/ ? remote_dir : \"$(pwd)/#{remote_dir}\").chomp('/')\n end",
... | [
"0.6849355",
"0.67603993",
"0.674682",
"0.67232275",
"0.66965675",
"0.66868615",
"0.6637166",
"0.6484007",
"0.64763343",
"0.64621866",
"0.6461172",
"0.6446828",
"0.6398459",
"0.6398444",
"0.63929063",
"0.6375933",
"0.6336155",
"0.6330845",
"0.63110167",
"0.6292367",
"0.629098... | 0.66038984 | 7 |
execute an SSH command on the remote host (or just pretend, if dryRun is true) N Without this we won't have a direct method to execute SSH commands on the remote server (with dryrun option) | def ssh(commandString, dryRun = false)
contentHost.sshAndScp.ssh(commandString, dryRun)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remote_run cmd\n ssh = ssh_command(cmd)\n _show_cmd ssh\n system(ssh) unless @opts[:norun] || $norun\n end",
"def ssh(commandString, dryRun)\n #N Without this we won't have a description to display (although the value is only used in the next statement)\n description = \"SSH #{user}@#{hos... | [
"0.7644817",
"0.758081",
"0.7467946",
"0.74653494",
"0.74214137",
"0.7131159",
"0.70903796",
"0.7046067",
"0.69887936",
"0.6958053",
"0.6955614",
"0.6892141",
"0.68638366",
"0.6850241",
"0.6842132",
"0.6804094",
"0.67964673",
"0.6788307",
"0.67848915",
"0.6774967",
"0.6751783... | 0.78252125 | 0 |
list all subdirectories of the base directory on the remote host N Without this we won't have a direct method to list all subdirectories within the remote directory | def listDirectories
return contentHost.listDirectories(baseDir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def listDirectories(baseDir)\n #N if un-normalised, code assuming '/' at the end might be one-off\n baseDir = normalisedDir(baseDir)\n #N without the command, we don't know what command to execute to list the directories\n command = findDirectoriesCommand(baseDir)\n #N without this, the co... | [
"0.6535337",
"0.63889694",
"0.6366609",
"0.6309605",
"0.62799674",
"0.6137568",
"0.61146027",
"0.6108654",
"0.6067611",
"0.60400695",
"0.6033286",
"0.59724987",
"0.595714",
"0.5920253",
"0.58352244",
"0.58130527",
"0.5812346",
"0.581129",
"0.58086365",
"0.5791609",
"0.5789728... | 0.6408531 | 1 |
list all the file hashes of the files within the base directory N Without this we won't have a direct method to list files within the remote directory, together with their hashes | def listFileHashes
return contentHost.listFileHashes(baseDir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def listFileHashLines(baseDir)\n #N Without this, the base directory might be missing the final '/', which might cause a one-off error when 'subtracting' the base directory name from the absolute paths to get relative paths\n baseDir = normalisedDir(baseDir)\n #N Without this, we wouldn't know what ... | [
"0.6858345",
"0.65153563",
"0.64512634",
"0.61644745",
"0.60218716",
"0.6014607",
"0.59915817",
"0.5990313",
"0.59625727",
"0.5960861",
"0.5960861",
"0.59424496",
"0.59277815",
"0.591865",
"0.5794828",
"0.57515365",
"0.5734461",
"0.56097907",
"0.5580782",
"0.5576285",
"0.5576... | 0.70343643 | 0 |
N Without this we won't have an easy way to present a description of this object (for tracing, feedback) | def to_s
return contentHost.locationDescriptor(baseDir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def inspect()\n #This is a stub, used for indexing\n end",
"def inspect\n \"#{self.class}<#{@description.inspect}>\"\n end",
"def inspect() end",
"def inspect() end",
"def inspect() end",
"def inspect() end",
"def inspect() end",
"def inspect() end",
"def inspect() end... | [
"0.78157413",
"0.7709494",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7525486",
"0.7507172",
"0.7507172",
"0.7507172",
"0.7507172",
"0.7507172",
... | 0.0 | -1 |
Get the content tree, from the cached content file if it exists, otherwise get if from listing directories and files and hash values thereof on the remote host. And also, if the cached content file name is specified, write the content tree out to that file. N Without this we won't have a way to get the content tree representing the contents of the remote directory, possibly using an existing cached content tree file (and if not, possibly saving a cached content tree for next time) | def getContentTree
#N Without this check we would try to read the cached content file when there isn't one, or alternatively, we would retrieve the content details remotely, when we could have read them for a cached content file
if cachedContentFile and File.exists?(cachedContentFile)
#N Without this, the content tree won't be read from the cached content file
return ContentTree.readFromFile(cachedContentFile)
else
#N Without this, we wouldn't retrieve the remote content details
contentTree = contentHost.getContentTree(baseDir)
#N Without this, the content tree might be in an arbitrary order
contentTree.sort!
#N Without this check, we would try to write a cached content file when no name has been specified for it
if cachedContentFile != nil
#N Without this, the cached content file wouldn't be updated from the most recently retrieved details
contentTree.writeToFile(cachedContentFile)
end
#N Without this, the retrieved sorted content tree won't be retrieved
return contentTree
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getCachedContentTree\n #N Without this, we won't know the name of the specified cached content file (if it is specified)\n file = getExistingCachedContentTreeFile\n #N Without this check, we would attempt to read a non-existent file\n if file\n #N Without this, a content tree that ha... | [
"0.7189733",
"0.66510344",
"0.6631559",
"0.6146769",
"0.60231584",
"0.57145214",
"0.57019234",
"0.57004094",
"0.56474715",
"0.5507554",
"0.549098",
"0.5481192",
"0.54659945",
"0.5434707",
"0.5413252",
"0.5395721",
"0.5387818",
"0.533445",
"0.5314144",
"0.5308171",
"0.53073406... | 0.7833455 | 0 |
N Without this we wouldn't have an easy way to create the sync operation object with all attributes specified (and with readonly attributes) | def initialize(sourceLocation, destinationLocation)
#N Without this, we wouldn't remember the (local) source location
@sourceLocation = sourceLocation
#N Without this, we wouldn't remember the (remote) destination location
@destinationLocation = destinationLocation
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sync *attributes\n self.class.define_method(:sync_attributes) do\n ActiveSync::Sync.sync_attributes(self, attributes)\n end\n define_method(:sync_record) do\n ActiveSync::Sync.sync_record(self, attributes)\n end\n define_method(:sync_associations) do\n ... | [
"0.62257636",
"0.5998785",
"0.57728297",
"0.5667353",
"0.5642296",
"0.5577635",
"0.5577635",
"0.55185425",
"0.54901296",
"0.5441762",
"0.53627497",
"0.5348261",
"0.53388643",
"0.5336288",
"0.5321299",
"0.5321299",
"0.52901083",
"0.51853997",
"0.51853997",
"0.51853997",
"0.515... | 0.0 | -1 |
Get the local and remote content trees N Without this, we woulnd't have an way to get the source and destination content trees, which we need so that we can determine what files are present locally and remotely, and therefore which files need to be uploaded or deleted in order to sync the remote file system to the local one. | def getContentTrees
#N Without this, we wouldn't get the content tree for the local source location
@sourceContent = @sourceLocation.getContentTree()
#N Without this, we wouldn't get the content tree for the remote destination location
@destinationContent = @destinationLocation.getContentTree()
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_remote_files_hierarchy(files, root = '/', hierarchy = { dirs: [], files: [] })\n files.each do |node|\n case node['node_type']\n when 'branch'\n get_remote_files_hierarchy(node['files'], root + node['name'] + '/', hierarchy)\n when 'directory'\n hierarchy[:dirs] << \"#{root}#{node['name... | [
"0.60094804",
"0.59638125",
"0.595711",
"0.5803146",
"0.5629192",
"0.55500877",
"0.55458045",
"0.54757345",
"0.54165083",
"0.5400852",
"0.5381941",
"0.53772426",
"0.53558654",
"0.53524566",
"0.52339834",
"0.52116674",
"0.51814115",
"0.5152623",
"0.51410717",
"0.5110208",
"0.5... | 0.73691565 | 0 |
On the local and remote content trees, mark the copy and delete operations required to sync the remote location to the local location. N Without this, we woundn't have an easy way to mark the content trees for operations required to perform the sync | def markSyncOperations
#N Without this, the sync operations won't be marked
@sourceContent.markSyncOperationsForDestination(@destinationContent)
#N Without these puts statements, the user won't receive feedback about what sync operations (i.e. copies and deletes) are marked for execution
puts " ================================================ "
puts "After marking for sync --"
puts ""
puts "Local:"
#N Without this, the user won't see what local files and directories are marked for copying (i.e. upload)
@sourceContent.showIndented()
puts ""
puts "Remote:"
#N Without this, the user won't see what remote files and directories are marked for deleting
@destinationContent.showIndented()
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doSync(options = {})\n #N Without this, the content files will be cleared regardless of whether :full options is specified\n if options[:full]\n #N Without this, the content files won't be cleared when the :full options is specified\n clearCachedContentFiles()\n end\n #N Witho... | [
"0.6713701",
"0.65546894",
"0.60874045",
"0.60850275",
"0.5955627",
"0.5849162",
"0.5838915",
"0.5838877",
"0.57865703",
"0.5759823",
"0.57272893",
"0.5712779",
"0.5710553",
"0.5710553",
"0.5710553",
"0.569446",
"0.5648276",
"0.56463933",
"0.56463933",
"0.5644414",
"0.5634724... | 0.7346809 | 0 |
Delete the local and remote cached content files (which will force a full recalculation of both content trees next time) N Without this, there won't be an easy way to delete all cached content files (thus forcing details for both content trees to be retrieved directly from the source & destination locations) | def clearCachedContentFiles
#N Without this, the (local) source cached content file won't be deleted
@sourceLocation.clearCachedContentFile()
#N Without this, the (remote) source cached content file won't be deleted
@destinationLocation.clearCachedContentFile()
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clear_remote\n execute(:rm, '-rf', File.join(remote_cache_path, '*')) if test!(\"[ -d #{remote_cache_path} ]\")\n end",
"def delete_cache_files; end",
"def delete_cache_files; end",
"def clean_remote\n to_delete = remote_assets - local_compiled_assets\n to_delete.each do |f|\n ... | [
"0.68381095",
"0.68269175",
"0.68269175",
"0.6720919",
"0.64047134",
"0.6160758",
"0.6075725",
"0.6033957",
"0.6015497",
"0.5943525",
"0.5941422",
"0.59313303",
"0.5930817",
"0.5922106",
"0.5873191",
"0.5863049",
"0.58577955",
"0.58343303",
"0.5814217",
"0.57643545",
"0.5763"... | 0.8078011 | 0 |
Do the sync. Options: :full = true means clear the cached content files first, :dryRun means don't do the actual copies and deletes, but just show what they would be. N Without this, there won't be a single method that can be called to do the sync operations (optionally doing a dry run) | def doSync(options = {})
#N Without this, the content files will be cleared regardless of whether :full options is specified
if options[:full]
#N Without this, the content files won't be cleared when the :full options is specified
clearCachedContentFiles()
end
#N Without this, the required content information won't be retrieved (be it from cached content files or from the actual locations)
getContentTrees()
#N Without this, the required copy and delete operations won't be marked for execution
markSyncOperations()
#N Without this, we won't know if only a dry run is intended
dryRun = options[:dryRun]
#N Without this check, the destination cached content file will be cleared, even for a dry run
if not dryRun
#N Without this check, the destination cached content file will remain there, even though it is stale once an actual (non-dry-run) sync operation is started.
@destinationLocation.clearCachedContentFile()
end
#N Without this, the marked copy operations will not be executed (or in the case of dry-run, they won't be echoed to the user)
doAllCopyOperations(dryRun)
#N Without this, the marked delete operations will not be executed (or in the case of dry-run, they won't be echoed to the user)
doAllDeleteOperations(dryRun)
#N Without this check, the destination cached content file will be updated from the source content file, even if it was only a dry-run (so the remote location hasn't actually changed)
if (not dryRun and @destinationLocation.cachedContentFile and @sourceLocation.cachedContentFile and
File.exists?(@sourceLocation.cachedContentFile))
#N Without this, the remote cached content file won't be updated from local cached content file (which is a reasonable thing to do assuming the sync operation completed successfully)
FileUtils::Verbose.cp(@sourceLocation.cachedContentFile, @destinationLocation.cachedContentFile)
end
#N Without this, any cached SSH connections will remain unclosed (until the calling application has terminated, which may or may not happen soon after completing the sync).
closeConnections()
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sync\n run 'sync', :quiet => true\n end",
"def sync\n local_directories.each do |local_directory|\n if settings[:dry_run]\n log.info(sync_command(local_directory))\n else\n IO.popen(sync_command(local_directory)).each { |line| handle_output(line) }\n... | [
"0.7001521",
"0.6518536",
"0.6421248",
"0.64205265",
"0.64205265",
"0.63177806",
"0.6235949",
"0.6235949",
"0.6191756",
"0.61621034",
"0.6099505",
"0.60952705",
"0.6016058",
"0.6016058",
"0.6016058",
"0.5947251",
"0.5947251",
"0.5939929",
"0.5930152",
"0.5921011",
"0.58695143... | 0.7811997 | 0 |
Do all the copy operations, copying local directories or files which are missing from the remote location N Without this, there won't be an easy way to execute (or echo if dryrun) all the marked copy operations | def doAllCopyOperations(dryRun)
#N Without this, the copy operations won't be executed
doCopyOperations(@sourceContent, @destinationContent, dryRun)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doCopyOperations(sourceContent, destinationContent, dryRun)\n #N Without this loop, we won't copy the directories that are marked for copying\n for dir in sourceContent.dirs\n #N Without this check, we would attempt to copy those directories _not_ marked for copying (but which might still have... | [
"0.7283741",
"0.6752468",
"0.66221154",
"0.6568697",
"0.65367734",
"0.64332926",
"0.634112",
"0.63219005",
"0.62421733",
"0.6190614",
"0.61654466",
"0.61613727",
"0.6137517",
"0.6132605",
"0.61081344",
"0.60579455",
"0.5998268",
"0.5985354",
"0.59164345",
"0.5865034",
"0.5853... | 0.708499 | 1 |
Do all delete operations, deleting remote directories or files which do not exist at the local location N Without this, there won't be an easy way to execute (or echo if dryrun) all the marked delete operations | def doAllDeleteOperations(dryRun)
#N Without this, the delete operations won't be executed
doDeleteOperations(@destinationContent, dryRun)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doDeleteOperations(destinationContent, dryRun)\n #N Without this loop, we won't delete all sub-directories or files and directories within sub-directories which have been marked for deletion\n for dir in destinationContent.dirs\n #N Without this check, we would delete directories which have no... | [
"0.7090771",
"0.64503205",
"0.64503205",
"0.6418074",
"0.63471127",
"0.6290854",
"0.6214927",
"0.6102063",
"0.60533196",
"0.6006432",
"0.600104",
"0.59897316",
"0.5942545",
"0.5941058",
"0.5897734",
"0.58603173",
"0.58548325",
"0.57968813",
"0.5748876",
"0.5741464",
"0.573183... | 0.7338213 | 0 |
Execute a (local) command, or, if dryRun, just pretend to execute it. Raise an exception if the process exit status is not 0. N Without this, there won't be an easy way to execute a local command, echoing it to the user, and optionally _not_ executing it if "dry run" is specified | def executeCommand(command, dryRun)
#N Without this, the command won't be echoed to the user
puts "EXECUTE: #{command}"
#N Without this check, the command will be executed, even though it is intended to be a dry run
if not dryRun
#N Without this, the command won't be executed (when it's not a dry run)
system(command)
#N Without this, a command that fails with error will be assumed to have completed successfully (which will result in incorrect assumptions in some cases about what has changed as a result of the command, e.g. apparently successful execution of sync commands would result in the assumption that the remote directory now matches the local directory)
checkProcessStatus(command)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def executeCommand(command, dryRun)\n #N Without this, the command won't be echoed\n puts \"EXECUTE: #{command}\"\n #N Without this check, the command will be executed even if it is meant to be a dry run\n if not dryRun\n #N Without this, the command won't actualy be execute even when it is meant ... | [
"0.7138665",
"0.68741983",
"0.6867353",
"0.66077614",
"0.65934044",
"0.6485354",
"0.64458686",
"0.64312",
"0.64247674",
"0.63332397",
"0.6329793",
"0.63161373",
"0.63060725",
"0.6296623",
"0.6239331",
"0.6091347",
"0.6079008",
"0.60544527",
"0.6041852",
"0.6034954",
"0.601323... | 0.74064827 | 0 |
Recursively perform all marked copy operations from the source content tree to the destination content tree, or if dryRun, just pretend to perform them. N Without this, there wouldn't be a way to copy files marked for copying in a source content tree to a destination content tree (or optionally do a dry run) | def doCopyOperations(sourceContent, destinationContent, dryRun)
#N Without this loop, we won't copy the directories that are marked for copying
for dir in sourceContent.dirs
#N Without this check, we would attempt to copy those directories _not_ marked for copying (but which might still have sub-directories marked for copying)
if dir.copyDestination != nil
#N Without this, we won't know what is the full path of the local source directory to be copied
sourcePath = sourceLocation.getFullPath(dir.relativePath)
#N Without this, we won't know the full path of the remote destination directory that this source directory is to be copied into
destinationPath = destinationLocation.getFullPath(dir.copyDestination.relativePath)
#N Without this, the source directory won't actually get copied
destinationLocation.contentHost.copyLocalToRemoteDirectory(sourcePath, destinationPath, dryRun)
else
#N Without this, we wouldn't copy sub-directories marked for copying of this sub-directory (which is not marked for copying in full)
doCopyOperations(dir, destinationContent.getDir(dir.name), dryRun)
end
end
#N Without this loop, we won't copy the files that are marked for copying
for file in sourceContent.files
#N Without this check, we would attempt to copy those files _not_ marked for copying
if file.copyDestination != nil
#N Without this, we won't know what is the full path of the local file to be copied
sourcePath = sourceLocation.getFullPath(file.relativePath)
#N Without this, we won't know the full path of the remote destination directory that this source directory is to be copied into
destinationPath = destinationLocation.getFullPath(file.copyDestination.relativePath)
#N Without this, the file won't actually get copied
destinationLocation.contentHost.copyLocalFileToRemoteDirectory(sourcePath, destinationPath, dryRun)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doAllCopyOperations(dryRun)\n #N Without this, the copy operations won't be executed\n doCopyOperations(@sourceContent, @destinationContent, dryRun)\n end",
"def markCopyOperations(destinationDir)\n #N Without this we can't loop over the immediate sub-directories to determine how each one n... | [
"0.7875499",
"0.6974648",
"0.68315053",
"0.6339092",
"0.6339092",
"0.62084794",
"0.61457634",
"0.6096766",
"0.60643005",
"0.60626996",
"0.5976893",
"0.5955779",
"0.5845902",
"0.5791096",
"0.5759361",
"0.57527834",
"0.5723526",
"0.56811875",
"0.5678704",
"0.5674515",
"0.565849... | 0.78498006 | 1 |
Recursively perform all marked delete operations on the destination content tree, or if dryRun, just pretend to perform them. N Without this, we wouldn't have a way to delete files and directories in the remote destination directory which have been marked for deletion (optionally doing it dry run only) | def doDeleteOperations(destinationContent, dryRun)
#N Without this loop, we won't delete all sub-directories or files and directories within sub-directories which have been marked for deletion
for dir in destinationContent.dirs
#N Without this check, we would delete directories which have not been marked for deletion (which would be incorrect)
if dir.toBeDeleted
#N Without this, we won't know the full path of the remote directory to be deleted
dirPath = destinationLocation.getFullPath(dir.relativePath)
#N Without this, the remote directory marked for deletion won't get deleted
destinationLocation.contentHost.deleteDirectory(dirPath, dryRun)
else
#N Without this, files and sub-directories within this sub-directory which are marked for deletion (even though the sub-directory as a whole hasn't been marked for deletion) won't get deleted.
doDeleteOperations(dir, dryRun)
end
end
#N Without this loop, we won't delete files within this directory which have been marked for deletion.
for file in destinationContent.files
#N Without this check, we would delete this file even though it's not marked for deletion (and therefore should not be deleted)
if file.toBeDeleted
#N Without this, we won't know the full path of the file to be deleted
filePath = destinationLocation.getFullPath(file.relativePath)
#N Without this, the file won't actually get deleted
destinationLocation.contentHost.deleteFile(filePath, dryRun)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def doAllDeleteOperations(dryRun)\n #N Without this, the delete operations won't be executed\n doDeleteOperations(@destinationContent, dryRun)\n end",
"def recurse\n children = (self[:recurse] == :remote) ? {} : recurse_local\n\n if self[:target]\n recurse_link(children)\n elsif self[:... | [
"0.73455304",
"0.62522185",
"0.5752937",
"0.5606207",
"0.5529007",
"0.5500345",
"0.5478859",
"0.54616225",
"0.5436599",
"0.5418047",
"0.5400986",
"0.5392225",
"0.531366",
"0.5309123",
"0.5289061",
"0.5288361",
"0.52647686",
"0.52619386",
"0.52544004",
"0.5232471",
"0.52304286... | 0.8322386 | 0 |
N Without this there won't be any easy way to close cached SSH connections once the sync operations are all finished (and if we closed the connections as soon as we had finished with them, then we wouldn't be able to cache them) | def closeConnections
#N Without this, cached SSH connections to the remote system won't get closed
destinationLocation.closeConnections()
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def closeConnections()\n #N Without this, the connections won't be closed\n @sshAndScp.close()\n end",
"def closeConnections\n #N Without this the cached connections won't get closed\n @contentHost.closeConnections()\n end",
"def close\n @channel.close if @channel\n @channel... | [
"0.7057207",
"0.6657495",
"0.650974",
"0.65025324",
"0.64481646",
"0.64007884",
"0.6340067",
"0.6334801",
"0.62424344",
"0.6228436",
"0.6215895",
"0.613557",
"0.60430294",
"0.60430294",
"0.6030879",
"0.6011565",
"0.60023946",
"0.598022",
"0.59301937",
"0.5915533",
"0.5832246"... | 0.7086593 | 0 |
You arrived ten minutes too early to an appointment, so you decided to take the opportunity to go for a short walk. The city provides its citizens with a Walk Generating App on their phones everytime you press the button it sends you an array of oneletter strings representing directions to walk (eg. ['n', 's', 'w', 'e']). You always walk only a single block in a direction and you know it takes you one minute to traverse one city block, so create a function that will return true if the walk the app gives you will take you exactly ten minutes (you don't want to be early or late!) and will, of course, return you to your starting point. Return false otherwise. Note: you will always receive a valid array containing a random assortment of direction letters ('n', 's', 'e', or 'w' only). It will never give you an empty array (that's not a walk, that's standing still!). | def isValidWalk(walk)
x=0
y=0
valid=false
for d in walk
case d
when 'n'
y += 1
when 's'
y -= 1
when 'e'
x += 1
when 'w'
x -= 1
end
end
if walk.length === 10 && x===0 && y === 0
valid = true
end
p valid
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def isValidWalk(walk)\r\n starting_point = []\r\n current_direction = 0\r\n distance_travelled = []\r\n if walk.size != 10\r\n false\r\n else\r\n walk.each do |direction|\r\n distance_travelled = move_player(distance_travelled, direction)\r\n end\r\n if distance_travelled == starting_point\r\... | [
"0.7005325",
"0.64366823",
"0.6224846",
"0.6107727",
"0.6032032",
"0.60037327",
"0.5982154",
"0.59286284",
"0.58981097",
"0.5887284",
"0.58018595",
"0.57814914",
"0.57814914",
"0.5748638",
"0.5740135",
"0.5698428",
"0.56587684",
"0.5630898",
"0.55616754",
"0.5553746",
"0.5527... | 0.5829618 | 10 |
initialize(resource, data) Method to initialize a record. | def initialize(service, resource, data, record = nil)
@resource = resource
@data = data
@record = record
@service = service
proxy = resource.spigot(service)
@map = proxy.map if proxy.present?
@associations = map ? map.associations : []
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(record)\n @record = record\n end",
"def initialize(resource_id, data)\n @logger = Config.logger\n @event_log = Config.event_log\n @job = nil\n end",
"def initialize(record = nil, *)\n @record = record\n end",
"def initialize(type, data=nil)\n ... | [
"0.7118509",
"0.7033196",
"0.6960827",
"0.6943045",
"0.6808666",
"0.6808642",
"0.67895186",
"0.6647307",
"0.6548822",
"0.6522198",
"0.65044963",
"0.6475516",
"0.639454",
"0.63933253",
"0.6388948",
"0.63792115",
"0.6355791",
"0.63211405",
"0.62781566",
"0.6250165",
"0.62463236... | 0.6669831 | 7 |
instantiate Executes the initialize method on the implementing resource with formatted data. | def instantiate
resource.new(data)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(resource_json)\n @json = JSON.parse(resource_json)\n parse_base\n end",
"def initialize\n super()\n init_data()\n end",
"def new(*)\n load\n super\n end",
"def initialize(resource)\n raise Puppet::DevError, \"Got TransObject instead of ... | [
"0.65970284",
"0.64900386",
"0.6399915",
"0.63738805",
"0.6357284",
"0.6350854",
"0.6347237",
"0.6344723",
"0.63422",
"0.6325956",
"0.63258696",
"0.6325555",
"0.63118964",
"0.628525",
"0.6220928",
"0.6133075",
"0.6099715",
"0.60900176",
"0.60895646",
"0.6079195",
"0.6079195",... | 0.749517 | 0 |
create Executes the create method on the implementing resource with formatted data. | def create
data.is_a?(Array) ? create_by_array : create_by_hash(data)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create(data)\n @create_resource_mixin.create(data)\n end",
"def create(*args)\n raise NotImplementedError, 'Implement a method to create the resource.'\n end",
"def create\n Puppet.debug( \"#{self.resource.type}: CREATE #{resource[:name]}\" ) \n end",
"def create\n # TODO: impl... | [
"0.75633824",
"0.7417757",
"0.71390915",
"0.71153146",
"0.71123016",
"0.70413476",
"0.7010462",
"0.69727874",
"0.69727874",
"0.69426554",
"0.69426554",
"0.69426554",
"0.69426554",
"0.6935337",
"0.687718",
"0.686045",
"0.6859226",
"0.6855752",
"0.6855752",
"0.6855752",
"0.6855... | 0.0 | -1 |
update Assigns the formatted data to the resource and saves. | def update
record.assign_attributes(data)
record.save! if record.changed?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update(...)\n assign_attributes(...)\n save\n end",
"def update!(**args)\n @formatted = args[:formatted] if args.key?(:formatted)\n @unformatted = args[:unformatted] if args.key?(:unformatted)\n end",
"def update\n\t\t\n\t\tend",
"def update\r\n end",
"def update!\n ... | [
"0.67310303",
"0.66759753",
"0.63827705",
"0.63112944",
"0.6310254",
"0.62747437",
"0.6273716",
"0.62702876",
"0.62598974",
"0.62514555",
"0.6243642",
"0.62237924",
"0.6221281",
"0.62209654",
"0.62141544",
"0.62141544",
"0.61882365",
"0.61814874",
"0.6170586",
"0.616605",
"0.... | 0.7098775 | 0 |
copy text between cursor and mark to clipboard | def copyregion(killafter = false, killadd = false)
return unless @mark
sel = [@cursor, @mark].sort
JTClipboard.addclip @text[sel.first...sel.last], killadd
if killafter
@text[sel.first...sel.last] = ""
@cursor = @mark if @cursor > @mark
end
resetmark
notify; @last = :copyregion
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_clipboard_cut\n if do_clipboard_copy()\n pos = @selection.first\n self.gui_set_value(nil, '')\n self.cur_pos = pos\n end\n end",
"def copy_to_clipboard\n end",
"def paste; clipboard_paste; end",
"def do_copy\n @editor.value = @current_copycode\n @editor.focus\n... | [
"0.7162748",
"0.70253325",
"0.69465905",
"0.68257546",
"0.67577547",
"0.6701359",
"0.6651931",
"0.66292113",
"0.652758",
"0.64711463",
"0.64573044",
"0.64228433",
"0.6377173",
"0.63673776",
"0.6274037",
"0.6267426",
"0.6252942",
"0.6209267",
"0.6203499",
"0.61606514",
"0.6157... | 0.7296873 | 0 |
moves text between cursor and mark to clipboard | def killregion(killadd = false)
copyregion true, killadd
@last = :killregion
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_clipboard_cut\n if do_clipboard_copy()\n pos = @selection.first\n self.gui_set_value(nil, '')\n self.cur_pos = pos\n end\n end",
"def copyregion(killafter = false, killadd = false)\n return unless @mark\n sel = [@cursor, @mark].sort\n JTClipboard.addclip @text[se... | [
"0.74801946",
"0.73943496",
"0.73472476",
"0.68966055",
"0.67716205",
"0.6762277",
"0.6719375",
"0.6708828",
"0.6676742",
"0.6588983",
"0.64937186",
"0.64350796",
"0.63764286",
"0.6261668",
"0.6258935",
"0.6249099",
"0.62014",
"0.6192762",
"0.6164518",
"0.6120206",
"0.6093522... | 0.0 | -1 |
moves text between cursor and eol to clipboard, delete line empty consecutive killtoeol calls cumulate to one clipboard entry | def killtoeol
char = @text[@cursor, 1]
if char == "\n"
killto :killtoeol do movetoright end
else
killto :killtoeol do movetoeol end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_clipboard_cut\n if do_clipboard_copy()\n pos = @selection.first\n self.gui_set_value(nil, '')\n self.cur_pos = pos\n end\n end",
"def paste_before()\n clipboard = send_message(\"getClipboardContents\" => [])[\"clipboardContents\"]\n if clipboard[-1].chr == \"\\n\"\n... | [
"0.67969537",
"0.6596287",
"0.6181221",
"0.6146281",
"0.6015248",
"0.5967393",
"0.59610367",
"0.5960387",
"0.5933118",
"0.59182364",
"0.5839885",
"0.57057226",
"0.5688946",
"0.56852984",
"0.5641738",
"0.561514",
"0.5598298",
"0.5547041",
"0.55289435",
"0.54814607",
"0.5476521... | 0.67587405 | 1 |
expand set to false (default) means that subnodes will not be drawn | def initialize(*params, &block)
@expand = false
super
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def expand\n expanding_vertices, @apical_vertices, @dist_thresh = @apical_vertices.clone, [], @dist_thresh.next\n expanding_vertices.each {|vertex| merge_with_edges(generate_subgraph(vertex))}\n self\n end",
"def expand\n @state.neighbors.map{|n|\n AStarNode.new(n, self)\n }\n end",... | [
"0.6218303",
"0.60740393",
"0.59683406",
"0.58888197",
"0.5835355",
"0.57563967",
"0.5654657",
"0.563873",
"0.5632658",
"0.5627397",
"0.55157226",
"0.5503459",
"0.5453877",
"0.5447366",
"0.5407305",
"0.5363828",
"0.53593504",
"0.5358762",
"0.53344333",
"0.53266793",
"0.529115... | 0.48733154 | 49 |
Format of auth.yml: consumer_key: (from osm.org) consumer_secret: (from osm.org) token: (use oauth setup flow to get this) token_secret: (use oauth setup flow to get this) The consumer key and consumer secret are the identifiers for this particular application, and are issued when the application is registered with the site. Use your own. | def test_local
auth = YAML.load(File.open('local_auth.yaml'))
@consumer=OAuth::Consumer.new auth['consumer_key'],
auth['consumer_secret'],
{:site=>"http://localhost:3000"}
# Create the access_token for all traffic
return OAuth::AccessToken.new(@consumer, auth['token'], auth['token_secret'])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def consumer_secret; config[:consumer_secret]; end",
"def oauth\n {\n consumer_key: @consumer_key,\n consumer_secret: @consumer_secret,\n token: @token,\n token_secret: @token_secret\n }\n end",
"def token_secret; config[:token_secret]; end",
"def authenticate (oauth_... | [
"0.7173915",
"0.70331264",
"0.6911294",
"0.6880273",
"0.67467296",
"0.6710576",
"0.662208",
"0.6619271",
"0.65218675",
"0.65190595",
"0.6505039",
"0.650424",
"0.6424107",
"0.6410788",
"0.6410788",
"0.6410788",
"0.6395793",
"0.6393676",
"0.6386917",
"0.6375623",
"0.63683295",
... | 0.67301637 | 5 |
need to override the json view to return what full_calendar is expecting. | def as_json(options = {})
{
:id => self.id,
:title => self.name,
:description => self.description || "",
:start => starts_at.rfc822,
:end => ends_at.rfc822,
:url => Rails.application.routes.url_helpers.select_path(id),
#:color => "red"
}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def as_json(options = {})\n {\n :id => self.id,\n :title => self.title,\n :description => self.description || \"\",\n :start => unless starts_at.blank? then starts_at.rfc822 else \"\" end,\n :end => unless ends_at.blank? then ends_at.rfc822 else \"\" end,\n :allDay => self.all_da... | [
"0.7442322",
"0.7395148",
"0.7354672",
"0.73337483",
"0.7333633",
"0.72639316",
"0.72342384",
"0.7227933",
"0.7221674",
"0.7163745",
"0.7143839",
"0.71263796",
"0.7060929",
"0.7058184",
"0.7036862",
"0.7031318",
"0.7006431",
"0.6857564",
"0.6796224",
"0.6760483",
"0.67209804"... | 0.66245323 | 26 |
GET /nurses/1 GET /nurses/1.json | def show
@nurse = Nurse.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @nurse }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n @nail_salon = NailSalon.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @nail_salon }\n end\n end",
"def show\n @nugget = Nugget.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n form... | [
"0.60660243",
"0.5961539",
"0.5942432",
"0.5841825",
"0.57584375",
"0.57443655",
"0.5744085",
"0.5731102",
"0.5731102",
"0.5712483",
"0.5704033",
"0.56945145",
"0.56624126",
"0.5638775",
"0.5638775",
"0.5638775",
"0.5629483",
"0.5583855",
"0.557659",
"0.5575616",
"0.5567756",... | 0.5800752 | 4 |
GET /nurses/new GET /nurses/new.json | def new
@nurse = Nurse.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @nurse }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @nail_salon = NailSalon.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @nail_salon }\n end\n end",
"def new\n @newspage = Newspage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @news... | [
"0.72123146",
"0.702307",
"0.70105606",
"0.69525266",
"0.69315284",
"0.68915606",
"0.68661326",
"0.68593687",
"0.68593687",
"0.68593687",
"0.6857373",
"0.6853299",
"0.68462634",
"0.6836695",
"0.6831094",
"0.6831094",
"0.68307436",
"0.6830159",
"0.68199927",
"0.68199927",
"0.6... | 0.6925118 | 5 |
POST /nurses POST /nurses.json | def create
@nurse = Nurse.new(params[:nurse])
respond_to do |format|
if @nurse.save
format.html { redirect_to @nurse, notice: 'Nurse was successfully created.' }
format.json { render json: @nurse, status: :created, location: @nurse }
else
format.html { render action: "new" }
format.json { render json: @nurse.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @onsen = Onsen.new(onsen_params)\n\n respond_to do |format|\n if @onsen.save\n format.html { redirect_to @onsen, notice: 'Onsen was successfully created.' }\n format.json { render :show, status: :created, location: @onsen }\n else\n format.html { render :new }\n ... | [
"0.6039991",
"0.5876144",
"0.58288634",
"0.57932293",
"0.5763098",
"0.57344013",
"0.5723912",
"0.563932",
"0.56129754",
"0.5584299",
"0.5580586",
"0.55512553",
"0.5532478",
"0.5512731",
"0.5511791",
"0.54805624",
"0.54601204",
"0.5436382",
"0.5435385",
"0.5433422",
"0.5419972... | 0.54143447 | 22 |
PUT /nurses/1 PUT /nurses/1.json | def update
@nurse = Nurse.find(params[:id])
respond_to do |format|
if @nurse.update_attributes(params[:nurse])
format.html { redirect_to @nurse, notice: 'Nurse was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @nurse.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update \n sneaker = find_sneaker\n # update! exceptions will be handled by the rescue_from ActiveRecord::RecordInvalid code\n sneaker.update(sneaker_params)\n render json: sneaker\n end",
"def update\n respond_to do |format|\n if @siren.update(siren_params)\n fo... | [
"0.5973854",
"0.58009416",
"0.5767399",
"0.57311505",
"0.5721174",
"0.571451",
"0.56857646",
"0.5647851",
"0.5642263",
"0.5586154",
"0.55436873",
"0.5542817",
"0.5541869",
"0.5535166",
"0.553049",
"0.55273074",
"0.55051553",
"0.54948145",
"0.5493343",
"0.5490147",
"0.54842496... | 0.5508139 | 16 |
DELETE /nurses/1 DELETE /nurses/1.json | def destroy
@nurse = Nurse.find(params[:id])
@nurse.destroy
respond_to do |format|
format.html { redirect_to nurses_url }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @nail_salon = NailSalon.find(params[:id])\n @nail_salon.destroy\n\n respond_to do |format|\n format.html { redirect_to nail_salons_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @nineteen.destroy\n respond_to do |format|\n format.html { redir... | [
"0.7050792",
"0.7045374",
"0.6952265",
"0.68477404",
"0.6820873",
"0.68013835",
"0.68013835",
"0.67699885",
"0.67519873",
"0.6750785",
"0.6740858",
"0.6732774",
"0.67094",
"0.6706535",
"0.6698785",
"0.6690022",
"0.6688474",
"0.66792953",
"0.6673355",
"0.667132",
"0.66699004",... | 0.6906585 | 3 |
CS169PGM GOOGLE METHODS BEGIN | def fetch_project_data
response = fetch_data(@@SETTINGS.project_tab)
unless response.nil?
adjust_projects response
return true
end
false
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getgm() end",
"def detect_landmarks path_to_image_file:\n # [START get_vision_service]\n vision = Google::Cloud::Vision.new\n # [END get_vision_service]\n\n # [START construct_request]\n image = vision.image path_to_image_file\n landmark = image.landmark\n # [END construct_request]\n\n # [START pr... | [
"0.5996517",
"0.57838815",
"0.56707555",
"0.55488425",
"0.54202586",
"0.53574556",
"0.53219384",
"0.53174603",
"0.5304487",
"0.5304487",
"0.5304487",
"0.5304487",
"0.5304487",
"0.5304487",
"0.5304487",
"0.5304487",
"0.5282467",
"0.52805656",
"0.5247497",
"0.5245725",
"0.52139... | 0.0 | -1 |
Fetches the data from the google sheet | def fetch_group_data
response = fetch_data(@@SETTINGS.group_tab)
unless response.nil?
adjust_groups response
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_cellfeed(spreadsheet_key)\n cellfeed_uri = \"http://spreadsheets.google.com/feeds/cells/#{spreadsheet_key}/od6/private/full\"\n cellfeed_response = get_feed(cellfeed_uri) \n create_datastructure_from_xml(cellfeed_response.body)\n \n end",
"def read_score_table service, spreadsheet_id\n ... | [
"0.6932061",
"0.66842407",
"0.66727114",
"0.6572149",
"0.6510849",
"0.6304237",
"0.63012975",
"0.6237194",
"0.62339646",
"0.62320703",
"0.619917",
"0.615057",
"0.6117842",
"0.61017716",
"0.6063502",
"0.6045861",
"0.60454655",
"0.6030018",
"0.601348",
"0.60029036",
"0.59935117... | 0.0 | -1 |
Takes the google sheet response and generates all the groups from it | def adjust_groups(response)
Group.destroy_all
delete_matches(response)
create_groups(response)
redirect_to root_path
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getGroups\n groups = $gm.get(\"/groups\", @token, \"per_page=100\")\n group_ids = Array.new\n\n groups['response'].each do |group|\n group_ids.push({\n 'name' => group['name'],\n 'group_id' => group['id'],\n 'image' => group['image_ur... | [
"0.66396314",
"0.6445772",
"0.63362515",
"0.61798525",
"0.61107934",
"0.61091864",
"0.60524076",
"0.60028595",
"0.59868133",
"0.5980988",
"0.59232754",
"0.59211445",
"0.5856983",
"0.58489615",
"0.58300185",
"0.58300185",
"0.57403326",
"0.57222676",
"0.5701474",
"0.56954473",
... | 0.5506566 | 31 |
Ensure valid credentials, either by restoring from the saved credentials files or intitiating an OAuth2 authorization. If authorization is required, the user's default browser will be launched to approve the request. | def authorize(force_reload)
FileUtils.mkdir_p(File.dirname(CREDENTIALS_PATH))
client_id = Google::Auth::ClientId.from_file(CLIENT_SECRETS_PATH)
token_store = Google::Auth::Stores::FileTokenStore.new(file: CREDENTIALS_PATH)
authorizer = Google::Auth::UserAuthorizer.new(
client_id, SCOPE, token_store)
user_id = 'default'
credentials = authorizer.get_credentials(user_id)
if force_reload || credentials.nil?
session[:is_authorized] = false
redirect_to google_fetch_path
return
end
credentials
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def authorize\r\n client_id = Google::Auth::ClientId.from_file CREDENTIALS_PATH\r\n token_store = Google::Auth::Stores::FileTokenStore.new file: TOKEN_PATH\r\n authorizer = Google::Auth::UserAuthorizer.new client_id, SCOPE, token_store\r\n user_id = \"default\"\r\n credentials = authorizer.get_credentials use... | [
"0.6933457",
"0.6722908",
"0.6659495",
"0.6651997",
"0.65571547",
"0.6526237",
"0.6499632",
"0.6494427",
"0.64732134",
"0.6464933",
"0.6456424",
"0.6455739",
"0.6452815",
"0.64510006",
"0.64500266",
"0.6441912",
"0.64387083",
"0.6409544",
"0.6401161",
"0.6396016",
"0.6394851"... | 0.6271662 | 65 |
Gets all the bookmark reports from the database Should return true if the length is 4 | def test_get_all_bookmark_reports
reports = BookmarkReport.getAll()
assert_equal 4, reports.length
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_bookmark_reports_details\n \n reports = BookmarkReport.getAll()\n\n report = reports[0]\n \n assert_equal 1, report.bookmarkId\n assert_equal 1, report.userId\n assert_equal 'something', report.issue\n assert_equal 'report', report.descriptio... | [
"0.68343604",
"0.635041",
"0.6318052",
"0.6210739",
"0.6161618",
"0.6140696",
"0.5994113",
"0.5956108",
"0.59277654",
"0.5842043",
"0.58030504",
"0.5801819",
"0.57237595",
"0.5700349",
"0.56992793",
"0.5670509",
"0.56472856",
"0.56195724",
"0.55992025",
"0.5591075",
"0.554167... | 0.749952 | 0 |
Gets the first bookmark report from the database: bookmark ID = 1, user ID = 1, issue = something, description = report Should return for all the tests | def test_get_all_bookmark_reports_details
reports = BookmarkReport.getAll()
report = reports[0]
assert_equal 1, report.bookmarkId
assert_equal 1, report.userId
assert_equal 'something', report.issue
assert_equal 'report', report.description
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def external_report\n return external_reports.first\n end",
"def test_get_by_bookmark_id\n\n validId = BookmarkReport.getById(1)\n\n assert_equal 1, validId.userId\n assert_equal 'something', validId.issue\n assert_equal 'report', validId.description\n\n invalidId = Bookmar... | [
"0.64918804",
"0.6290008",
"0.59689224",
"0.59628683",
"0.5729023",
"0.572567",
"0.56150216",
"0.55334824",
"0.54678637",
"0.5370985",
"0.5329327",
"0.5287675",
"0.5277566",
"0.5253635",
"0.5223554",
"0.52191067",
"0.52067536",
"0.51553077",
"0.51386863",
"0.5133258",
"0.5118... | 0.68113977 | 0 |
Adds a report into the database, one that respects the field requiremenets and one that does not Should return true for the first report because it respects the requiremenets and false for the second one | def test_new_report
testOne = BookmarkReport.newReport(2, 1, "issue", "description");
assert_equal true, testOne
testTwo = BookmarkReport.newReport(nil, 1, " ", "test");
assert_equal false, testTwo
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_report\n # TODO: should traverse listener queue for conditions and callbacks\n if @rain == :warning or @rain == :imminent\n @site.reports.create\n end\n end",
"def can_ba_create_report?\n (status == Service.status_confirmed || status == Service.status_conducted) && report.nil? \n en... | [
"0.6323783",
"0.5899068",
"0.58449686",
"0.5810592",
"0.57838315",
"0.57838315",
"0.57222235",
"0.5713861",
"0.5705615",
"0.56574976",
"0.5628815",
"0.5622186",
"0.55423266",
"0.5486699",
"0.5476326",
"0.54718643",
"0.54245496",
"0.54056215",
"0.5400118",
"0.5382249",
"0.5373... | 0.5089973 | 57 |
Gets a report of a bookmark that has ID = 1 and a bookmark that has ID = 0 Should return true for all the tests made for ID = 1 and ID = 0, because there does not exist a bookmark with ID = 0 | def test_get_by_bookmark_id
validId = BookmarkReport.getById(1)
assert_equal 1, validId.userId
assert_equal 'something', validId.issue
assert_equal 'report', validId.description
invalidId = BookmarkReport.getById(0)
assert_nil invalidId
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_bookmark_reports_details\n \n reports = BookmarkReport.getAll()\n\n report = reports[0]\n \n assert_equal 1, report.bookmarkId\n assert_equal 1, report.userId\n assert_equal 'something', report.issue\n assert_equal 'report', report.descriptio... | [
"0.6570599",
"0.62345517",
"0.5988543",
"0.5978591",
"0.58733493",
"0.5760648",
"0.57567286",
"0.56794226",
"0.5605636",
"0.5583483",
"0.55218905",
"0.54790664",
"0.5436384",
"0.5394215",
"0.5359893",
"0.5332839",
"0.53305566",
"0.53033876",
"0.5248693",
"0.52171856",
"0.5145... | 0.6691889 | 0 |
Gets all the reports from the database and deletes them Should return true as they do not exist anymore | def test_delete_reports
reports = BookmarkReport.getAll()
for report in reports
test = BookmarkReport.deleteReport(report.reportId)
assert_equal true, test
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_all_reports!\n reports=get_report_list\n reports.each do |rep|\n rep[:ReportID]\n delete_report(rep[:ReportID])\n end\n end",
"def delete_data\n Report.plugin_matrix.each do |resource_name, measurements|\n model = Object.const_get((resource_name + 'Resource').ca... | [
"0.7976635",
"0.6659144",
"0.6492318",
"0.64480263",
"0.6422423",
"0.64003795",
"0.6328358",
"0.63014066",
"0.62634295",
"0.62156916",
"0.6170256",
"0.6162276",
"0.6153949",
"0.6148952",
"0.61148447",
"0.6107269",
"0.61024606",
"0.607564",
"0.60746294",
"0.60601956",
"0.60386... | 0.7615873 | 1 |
GET /leilaos/1 GET /leilaos/1.xml | def show
@leilao = Leilao.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @leilao }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @lieus = Lieu.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @lieus }\n end\n end",
"def xml(id)\n http.get(\"/nfse/#{id}/xml\") do |response|\n response.headers.fetch(\"Location\") { \"\" }\n end\n end",
... | [
"0.6636714",
"0.64375603",
"0.64191407",
"0.63023424",
"0.63012815",
"0.62957734",
"0.626545",
"0.62618005",
"0.6166575",
"0.6150047",
"0.60904324",
"0.60896915",
"0.60833794",
"0.6079378",
"0.60776573",
"0.6071869",
"0.6053767",
"0.6053576",
"0.60315585",
"0.601692",
"0.6015... | 0.6682231 | 0 |
GET /leilaos/new GET /leilaos/new.xml | def new
@leilao = Leilao.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @leilao }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @lien = Lien.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @lien }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => new_vurl }\n end\n end",
"def new\n @lote... | [
"0.73974955",
"0.7360316",
"0.726974",
"0.7250252",
"0.7227784",
"0.72236747",
"0.71942735",
"0.71741647",
"0.71721184",
"0.7165546",
"0.71629995",
"0.7161596",
"0.7156797",
"0.71560305",
"0.71560305",
"0.7120607",
"0.7087882",
"0.7071855",
"0.7070232",
"0.70670885",
"0.70603... | 0.7479105 | 0 |
POST /leilaos POST /leilaos.xml | def create
@leilao = Leilao.new(params[:leilao])
respond_to do |format|
if @leilao.save
flash[:notice] = 'Leilao was successfully created.'
format.html { redirect_to(@leilao) }
format.xml { render :xml => @leilao, :status => :created, :location => @leilao }
else
format.html { render :action => "new" }
format.xml { render :xml => @leilao.errors, :status => :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create(name=\"Default Name\", age=\"50\")\r\n xml_req =\r\n \"<?xml version='1.0' encoding='UTF-8'?>\r\n <person>\r\n <name>#{name}</name>\r\n <age>#{age}</age>\r\n </person>\"\r\n \r\n request = Net::HTTP::Post.new(@url)\r\n request.add_field \"Content-Type\", \"application/xml\"\r... | [
"0.6363825",
"0.6350087",
"0.6256222",
"0.6171393",
"0.6064402",
"0.6039733",
"0.59295046",
"0.57941824",
"0.5787358",
"0.57157487",
"0.5626261",
"0.56200886",
"0.56197876",
"0.5602025",
"0.55950296",
"0.55920094",
"0.55840456",
"0.5570361",
"0.55560124",
"0.5547443",
"0.5542... | 0.64394903 | 0 |
PUT /leilaos/1 PUT /leilaos/1.xml | def update
@leilao = Leilao.find(params[:id])
respond_to do |format|
if @leilao.update_attributes(params[:leilao])
flash[:notice] = 'Leilao was successfully updated.'
format.html { redirect_to(@leilao) }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @leilao.errors, :status => :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def put(uri, xml)\r\n req = Net::HTTP::Put.new(uri)\r\n req[\"content-type\"] = \"application/xml\"\r\n req.body = xml\r\n request(req)\r\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post 'update', opts\n end",
"def res... | [
"0.6548726",
"0.64226365",
"0.6311103",
"0.6209257",
"0.6071829",
"0.5967112",
"0.57638544",
"0.5714797",
"0.57015836",
"0.5687",
"0.56332886",
"0.562034",
"0.56100273",
"0.560201",
"0.5591202",
"0.55870014",
"0.55684",
"0.55666345",
"0.55553526",
"0.5555261",
"0.5554824",
... | 0.619678 | 4 |
DELETE /leilaos/1 DELETE /leilaos/1.xml | def destroy
@leilao = Leilao.find(params[:id])
@leilao.destroy
respond_to do |format|
format.html { redirect_to(leilaos_url) }
format.xml { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n RestClient.delete \"#{REST_API_URI}/contents/#{id}.xml\" \n self\n end",
"def delete()\n response = send_post_request(@xml_api_delete_path)\n response.is_a?(Net::HTTPSuccess) or response.is_a?(Net::HTTPRedirection)\n end",
"def destroy\n @aisle = Aisle.find(params[:id])\n ... | [
"0.6976416",
"0.6762079",
"0.6693905",
"0.6679505",
"0.6605138",
"0.6523554",
"0.6515991",
"0.6515894",
"0.6515249",
"0.6499209",
"0.6494185",
"0.64905745",
"0.64905196",
"0.6454576",
"0.6454576",
"0.64431274",
"0.64132154",
"0.6401562",
"0.6393576",
"0.6376159",
"0.6369727",... | 0.69837344 | 0 |
Attempt to take 3 observations that would violate system constraints. needs autonomy to manage the capacitor selection need autonomy to make sure capacitors don't go too low. | def medium_test
wait(20) # capture results from the easy test...
5.times do
cmd("CFS CFS_WHE_OBS_START")
wait(20)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def restriction \n end",
"def caloric_requirement\n age = current_age\n bmr = basal_metabolic_rate(age)\n if strength == 1\n caloric_requirement_st1(age, bmr).to_i\n elsif strength == 2\n caloric_requirement_st2(age, bmr).to_i\n else\n caloric_requirement_st3(age, bmr).to_i\n ... | [
"0.5359062",
"0.528268",
"0.52245426",
"0.5198735",
"0.51801103",
"0.5132356",
"0.51121503",
"0.51114583",
"0.5086593",
"0.5052438",
"0.5048837",
"0.5040801",
"0.4977414",
"0.4968954",
"0.4964098",
"0.49550056",
"0.4899717",
"0.48882034",
"0.48688716",
"0.4868648",
"0.4863882... | 0.0 | -1 |
Attempt to take 3 observations while simultaneously injecting commands to handle heaters and louvers manually. | def hard_test
wait(10) # let some capacitor get up some charge.
5.times do
wait(5)
cmd("CFS CFS_WHE_OBS_START")
wait(5)
cmd("CFS CFS_WHE_HTR_ON")
wait(5)
cmd("CFS CFS_WHE_LOUVER_CLOSE")
wait(5)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_phase3_basic_command\n # If B button was pressed\n if Input.trigger?(Input::B)\n # Play cancel SE\n $game_system.se_play($data_system.cancel_se)\n # Go to command input for previous actor\n phase3_prior_actor\n return\n end\n # If C button was pressed\n if Input.t... | [
"0.5831192",
"0.579489",
"0.57114065",
"0.5547119",
"0.55374914",
"0.5527577",
"0.54372317",
"0.54234296",
"0.5413138",
"0.5323597",
"0.5283924",
"0.52831274",
"0.52571297",
"0.5256628",
"0.52371",
"0.5200598",
"0.5190458",
"0.51765645",
"0.5168584",
"0.51361436",
"0.5113928"... | 0.4775092 | 57 |
Methods for custom attributes | def organizations
orgs = []
Organization.all.each do |o|
orgs.push(Api::V1::OrganizationSerializer.new(o, @instance_options).attributes)
end
orgs
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def attr(name); end",
"def attribute(name); end",
"def attr; end",
"def method_missing(method_name, *args, &block)\n return super unless define_attribute_methods\n self.send(method_name, *args, &block)\n end",
"def method_missing(meth, *args, &blk)\n if args.length > 0\n... | [
"0.7651348",
"0.764791",
"0.7601169",
"0.7329614",
"0.7284593",
"0.72721165",
"0.72721165",
"0.72721165",
"0.72721165",
"0.72721165",
"0.72721165",
"0.72721165",
"0.7250274",
"0.7250274",
"0.7194783",
"0.7194783",
"0.7194783",
"0.7194783",
"0.7194783",
"0.7194783",
"0.7194783... | 0.0 | -1 |
Store a prefix in the trie, and associate a value with it | def []=(prefix, value)
current = @root
current_prefix = prefix
while current_prefix != ""
current, current_prefix = find_canididate_insertion_node(current, current_prefix)
end
current[:value] = value
return value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_pattern(prefix, pattern, &block)\n @trie[prefix] ||= {}\n @trie[prefix][pattern] = block\n end",
"def add_word_to_trie(word)\n chars = word.downcase.split('')\n crawl = root\n\n chars.each do |char|\n child = crawl.children\n if child.keys.include?(char)\n crawl = child[c... | [
"0.6257317",
"0.6109661",
"0.60767347",
"0.60545164",
"0.6041448",
"0.5992668",
"0.5987903",
"0.5943061",
"0.58996505",
"0.5882091",
"0.58446103",
"0.581478",
"0.5721165",
"0.5716789",
"0.5713481",
"0.5680035",
"0.56740385",
"0.56301206",
"0.5627325",
"0.55971026",
"0.5574934... | 0.7424172 | 0 |
Perform a prefix search. Will return the value associated with the longest prefix | def [](prefix)
current = @root
current_prefix = prefix
while !current.nil? && current_prefix != ""
previous = current
current, current_prefix = next_node(current, current_prefix)
end
return current[:value] if current
return previous[:value]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def longest_prefix(str, pos= 0, len= -1, match_prefix= false)\n end",
"def find_prefix(prefix)\n node = find_word prefix.downcase\n if node.nil?\n [false, false, 0]\n else\n count = node.word_count\n count -= 1 if node.is_word\n [true, node.is_word, count]\n end\n end",
... | [
"0.7384357",
"0.71830565",
"0.6922366",
"0.68519425",
"0.6821444",
"0.67653644",
"0.67599136",
"0.6738252",
"0.6723146",
"0.67081183",
"0.66819835",
"0.6673414",
"0.6670009",
"0.6650048",
"0.6625341",
"0.66021806",
"0.6599446",
"0.6592044",
"0.65887475",
"0.65446365",
"0.6522... | 0.572012 | 71 |
Set a value in the trie if it isn't null. Can be used to initialize collections as values | def set_if_nil(word, value)
current = @root
current_prefix = word
while current_prefix != ""
current, current_prefix = find_canididate_insertion_node(current, current_prefix)
end
current[:value] ||= value
return current[:value]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set!(value_obj)\n\t\t\tinsist!()\n\t\t\t@lookup[0...-1].inject(@obj_with_keys) { |deep_obj, this_key|\n\t\t\t\tdeep_obj[this_key]\n\t\t\t}[@lookup[-1]] = value_obj\n\t\tend",
"def set_value(node, path, value)\n path = path.to_s.split('.') unless path.is_a?(Array)\n\n path[0..-2].each_index ... | [
"0.6495417",
"0.642123",
"0.6335264",
"0.62533474",
"0.59544724",
"0.59496105",
"0.59496105",
"0.5929919",
"0.58415437",
"0.5804963",
"0.57917076",
"0.57859576",
"0.57849115",
"0.5783281",
"0.57791805",
"0.57625985",
"0.57564473",
"0.57271314",
"0.57152843",
"0.5704708",
"0.5... | 0.72538346 | 0 |
Perform a prefix search, and return all values in the trie that have this prefix | def match(prefix)
# Walk the trie from the root, consuming the prefix one edge at a time.
# `previous`/`previous_prefix` remember the last node reached and the part
# of the prefix still unconsumed at that point.
result = []
current = @root
current_prefix = prefix
while current != nil && current_prefix != ""
previous, previous_prefix = current, current_prefix
current, current_prefix = next_node(current, current_prefix)
end
unless current
if current_prefix
# No node was reached and input remains: nothing in the trie matches.
return []
else
# The walk stopped mid-edge: keep every child of the last node whose edge
# label still begins with the unconsumed remainder.
next_nodes = previous[:nodes].select { |prefix, node| prefix.start_with?(previous_prefix) }.values
end
else
# The whole prefix mapped onto a node: collect its entire subtree.
next_nodes = [current]
end
# Iterative depth-first traversal gathering every value below the matched
# node(s).
until next_nodes.empty?
current = next_nodes.pop
result << current[:value]
current[:nodes].each { |prefix, node| next_nodes.push(node) }
end
# compact drops interior nodes that never had a value assigned.
return result.compact
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_matching_strings(prefix)\n puts \"Matching for #{prefix}\"\n ptr = @root\n for i in 0..prefix.size-1\n ptr = ptr.children[prefix[i]]\n return nil unless ptr\n end\n arr = []\n arr << prefix if ptr.is_leaf\n arr << get_strings(ptr, prefix)\n arr\n end",
"def find(prefix)... | [
"0.7765639",
"0.7521391",
"0.75053906",
"0.7404824",
"0.74030316",
"0.73121566",
"0.71918255",
"0.71758324",
"0.71754545",
"0.71599686",
"0.7131109",
"0.7068194",
"0.6899894",
"0.6880155",
"0.6787459",
"0.66669923",
"0.6611561",
"0.66041136",
"0.6597065",
"0.6502491",
"0.6486... | 0.7594333 | 1 |
get the node for insertion, splitting intermediary nodes as necessary | def find_canididate_insertion_node(current, key)
# Returns [node, remaining_key]: the child under which the caller should
# continue inserting `key`, plus whatever part of `key` is left to place.
# May mutate `current` by inserting a child or splitting its edges.
if current[:key_length].nil?
# Fresh node with no edges yet: hang the whole key off it in one edge.
new_node = insert_node(current, key)
current[:key_length] = key.length
return new_node, ""
end
# check if we have an existing shared prefix already
current_key = key[0...current[:key_length]]
# look for an existing key path
if current[:nodes].has_key?(current_key)
return current[:nodes][current_key], key[current_key.length..-1]
end
# search for a shared prefix, and split all the nodes if necessary
current[:nodes].keys.each do |prefix|
common_prefix = shared_prefix(key, prefix)
next unless common_prefix
new_key_length = common_prefix.length
split_nodes(current, new_key_length)
return current[:nodes][common_prefix], key[new_key_length..-1]
end
# potentially split all other keys
# (the key is shorter than this node's edge length, so shorten every edge
# to the key's length before inserting the new edge)
if current_key.length < current[:key_length]
split_nodes(current, current_key.length)
end
new_node = insert_node(current, current_key)
return new_node, key[current_key.length..-1]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def insert(node, &block); end",
"def insert(node, &block); end",
"def insert_node\n insert_node_helper(@root)\n end",
"def insert_predecessor\n insert_node('insert_predecessor')\n end",
"def insert_node(index, word, definition)\n node_before_index = find_node(index - 1)\n node_at_current_... | [
"0.6867058",
"0.6867058",
"0.65956396",
"0.629302",
"0.6283602",
"0.6233717",
"0.61871123",
"0.61863637",
"0.6173093",
"0.615837",
"0.61381793",
"0.61224467",
"0.61005735",
"0.6075693",
"0.606866",
"0.6068604",
"0.6068337",
"0.605623",
"0.60474813",
"0.60417694",
"0.601125",
... | 0.63288593 | 3 |
split all the branches in the given root to the given length | def split_nodes(root, new_length)
# Shortens every outgoing edge of `root` to `new_length` characters by
# introducing one intermediate node per edge; the old child is re-attached
# under the leftover suffix. Assumes new_length < root[:key_length].
old_nodes = root[:nodes]
split_length = root[:key_length] - new_length
root[:key_length] = new_length
root[:nodes] = {}
old_nodes.each do |key, old|
# New intermediate node keyed by the first new_length characters...
new_node = insert_node(root, key[0...new_length])
# ...with the original child hanging off the remaining suffix.
new_node[:nodes][key[new_length..-1]] = old
new_node[:key_length] = split_length
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cut(num_segments)\n t = direction*(1.0/num_segments)\n ret = [@root]\n num_segments.times do |i|\n ret << @root + t*(i + 1)\n end\n ret\n end",
"def convert_linked_list_to_balanced_BST(head, length)\n \n # trivial case\n # return immediately\n if head.nil? || length == 0\n return ... | [
"0.59835863",
"0.59456754",
"0.58433086",
"0.57731164",
"0.55377686",
"0.55249125",
"0.54187435",
"0.53409517",
"0.5265363",
"0.5238305",
"0.5237193",
"0.52317846",
"0.5231493",
"0.5222523",
"0.5216074",
"0.52154464",
"0.52142787",
"0.52111673",
"0.5108704",
"0.5107876",
"0.5... | 0.63477045 | 0 |
# Finds the child reachable from `current` via the edge matching the front
# of `key`. Returns [child, remaining_key], or [nil, nil] when `current`
# has no edges (:key_length unset) or no edge matches.
def next_node(current, key)
  span = current[:key_length]
  return nil, nil unless span
  head = key[0...span]
  return nil, nil unless current[:nodes].key?(head)
  [current[:nodes][head], key[head.length..-1]]
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def search_key key\n node = self.head\n while node\n return node if node.key == key\n nc = node.child \n while nc\n return nc if nc.key == key\n ncs = nc.sibling\n while ncs \n return ncs if ncs.key == key\n ncs = ncs.sibling\n end\n nc =... | [
"0.7783391",
"0.7575067",
"0.7539671",
"0.7466803",
"0.73225266",
"0.7304928",
"0.7287149",
"0.72754735",
"0.7262475",
"0.7243163",
"0.72368056",
"0.7192649",
"0.7173257",
"0.7171666",
"0.7145616",
"0.7129102",
"0.7111854",
"0.7111854",
"0.7099636",
"0.7062841",
"0.7040026",
... | 0.8200249 | 0 |
# Returns the longest common prefix of `a` and `b`, found by shrinking a
# candidate down from the shorter string's length; returns nil when the
# strings share no prefix at all (an empty shared prefix reports as nil).
def shared_prefix(a, b)
  limit = [a.length, b.length].min
  limit.downto(0) do |len|
    candidate = a[0..len]
    return candidate if candidate == b[0..len]
  end
  nil
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def common_prefix(a,b)\n return '' if b.nil?\n 0.upto(a.length) {|i|\n return (i == 0 ? \"\" : a[0..i-1]) if a[0..i] != b[0..i]\n }\n ''\nend",
"def common_prefix(words)\n smallest_string= words.min_by{|word| word.size}\n\n result = \"\"\n\n smallest_string.chars.each_with_index do |current_char, curre... | [
"0.79685265",
"0.71222836",
"0.69956446",
"0.69941056",
"0.6879243",
"0.6851252",
"0.6795426",
"0.6791654",
"0.6774552",
"0.67604685",
"0.67096066",
"0.6635475",
"0.66062385",
"0.6604037",
"0.6500269",
"0.64965665",
"0.64592725",
"0.64484715",
"0.6447398",
"0.6424874",
"0.640... | 0.83040935 | 0 |
Enable or disable maintenance mode. This endpoint only works on the local agent. | def enable enable=true, reason=nil, options=nil
  # PUT /v1/agent/maintenance with optional reason and datacenter; raises
  # Diplomat::UnknownStatus unless the agent answers 200, otherwise stores
  # the response in @raw and returns true.
  raw = @conn.put do |req|
    pieces = ["/v1/agent/maintenance"]
    pieces << use_named_parameter('enable', enable.to_s)
    pieces << use_named_parameter('reason', reason) unless reason.nil?
    pieces << use_named_parameter('dc', options[:dc]) if options and options[:dc]
    req.url concat_url pieces
  end
  raise Diplomat::UnknownStatus, "status #{raw.status}" unless raw.status == 200
  @raw = raw
  true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def maintenance_mode(primitive)\n retry_command {\n pcs 'property', 'set', \"maintenance-mode=#{primitive}\"\n }\n end",
"def activate_maintenance_mode\n return unless maintenance && pending_migrations?\n callback(:activate_maintenance_mode) do\n notify(:activate_maintenance_mode)\n ... | [
"0.7179096",
"0.69145447",
"0.6753331",
"0.670682",
"0.66946447",
"0.66529024",
"0.65764844",
"0.65567106",
"0.6490434",
"0.6470255",
"0.64409876",
"0.6370464",
"0.6296074",
"0.62374806",
"0.6174633",
"0.6159143",
"0.6152293",
"0.6128881",
"0.6074654",
"0.60273826",
"0.602738... | 0.6822855 | 2 |
# True when a user is signed in and that user carries the :admin role;
# falsy otherwise.
def admin?
  user = @current_user
  user && user.has_role?(:admin)
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ordered_railties; end",
"def ordered_railties; end",
"def migration_railties; end",
"def migration_railties; end",
"def dev_minor() end",
"def appraisals; end",
"def appraisals; end",
"def production_curtailment; end",
"def on_307; on_330; end",
"def orm_patches_applied; end",
"def version_... | [
"0.5719549",
"0.5719549",
"0.55357546",
"0.55357546",
"0.54979336",
"0.5453287",
"0.5453287",
"0.5435803",
"0.5413073",
"0.5394526",
"0.5381407",
"0.5381407",
"0.5381407",
"0.5381407",
"0.53382653",
"0.53382653",
"0.53343225",
"0.5322545",
"0.5305532",
"0.5281019",
"0.5275196... | 0.0 | -1 |
GET /comments GET /comments.json | def index
# Lists comments for the current rulemaking, optionally filtered, with
# HTML (paginated), XLSX (download) and CSV (streamed) renderings.
cr = current_rulemaking
conditions = get_conditions
if conditions[0].empty?
c = cr.comments.all
else
#do left outer join in case there are no conditions on suggested_changes
c = cr.comments.where("id IN (?)", cr.comments.left_outer_joins(:suggested_changes).where(conditions).select(:id))
end
c = c.order(:order_in_list)
respond_to do |format|
format.html {
# Totals are over all comments, not just the filtered page.
@total_comments = cr.comments.count
@total_commenters = cr.comments.sum(:num_commenters)
@filtered = !conditions[0].empty?
@filter_querystring = remove_empty_elements(filter_params_all)
@comments = c.page(params[:page]).per_page(10)
}
format.xlsx {
@comments = c
response.headers['Content-Disposition'] = 'attachment; filename="comments.xlsx"'
}
format.csv {
# CSV output is produced by the stream_csv helper.
stream_csv(c)
}
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def comments\n client.get(\"/#{id}/comments\")\n end",
"def comments\n @list.client.get(\"#{url}/comments\")\n end",
"def comments\n @list.client.get(\"#{url}/comments\")\n end",
"def comments\n render json: @post.comments\n end",
"def list\n comments = Comment.where(post: @p... | [
"0.8573962",
"0.7837408",
"0.7837408",
"0.7555969",
"0.75293446",
"0.75213426",
"0.74966145",
"0.739651",
"0.7300984",
"0.729431",
"0.7285037",
"0.72734404",
"0.72714454",
"0.7247879",
"0.724236",
"0.7208452",
"0.72043866",
"0.71849746",
"0.7177853",
"0.71577495",
"0.715637",... | 0.0 | -1 |
GET /comments/1 GET /comments/1.json | def show
get_filtering_and_next_and_previous
@change_log_entries = current_rulemaking.change_log_entries.where(comment: @comment).order(created_at: :desc).page(params[:page]).per_page(10)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def comments\n client.get(\"/#{id}/comments\")\n end",
"def comments\n @list.client.get(\"#{url}/comments\")\n end",
"def comments\n @list.client.get(\"#{url}/comments\")\n end",
"def comments\n @article = Article.find(params[:id])\n @comments = @article.comments\n\n respond_... | [
"0.82821167",
"0.74428797",
"0.74428797",
"0.7435283",
"0.74061906",
"0.7305913",
"0.7283544",
"0.7258592",
"0.72560287",
"0.72484696",
"0.7243203",
"0.7219824",
"0.7219523",
"0.7218877",
"0.7174389",
"0.7158426",
"0.71583927",
"0.71185815",
"0.711669",
"0.710114",
"0.7100739... | 0.0 | -1 |
POST /comments POST /comments.json | def create
# we only get here if this comment is being manually entered.
@comment = Comment.new(comment_params)
# Append the new comment at the end of the display order.
c_max = current_rulemaking.comments.maximum(:order_in_list)
next_order_in_list = (c_max.nil? ? 0 : c_max) + 1
@comment.order_in_list = next_order_in_list
@comment.rulemaking = current_rulemaking
@comment.manually_entered = true
respond_to do |format|
if @comment.save
# Persist any suggested-change links, then record the creation in the
# change log.
suggested_change_change_hash = save_suggested_changes
save_change_log(current_user,{comment: @comment, suggested_change_changes: suggested_change_change_hash, action_type: 'create'})
format.html { redirect_to edit_comment_path(@comment), notice: 'Comment was successfully created.' }
format.json { render :show, status: :created, location: @comment }
else
# Re-populate select-list data before re-rendering the form.
set_select_options
format.html { render :new }
format.json { render json: @comment.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @comment = @post.comments.new(comment_params)\n if @comment.save\n render json: @comment, status: :created\n else\n render json: @comment.errors, status: :unprocessable_entity\n end\n\n end",
"def comment options={}\n client.post(\"/#{id}/comments\", options)\n end",
... | [
"0.7513347",
"0.74232185",
"0.73758155",
"0.72379327",
"0.7206818",
"0.7087521",
"0.70696557",
"0.70558655",
"0.7049475",
"0.699036",
"0.69837004",
"0.6967921",
"0.6959057",
"0.6911904",
"0.69105834",
"0.68748885",
"0.6873017",
"0.6823846",
"0.6813315",
"0.6789194",
"0.678650... | 0.0 | -1 |
PATCH/PUT /comments/1 PATCH/PUT /comments/1.json | def update
# Applies edits to an existing comment; on success the change is written
# to the change log and the user returns to the edit form with any active
# filters preserved in the querystring.
respond_to do |format|
if @comment.update(comment_params)
suggested_change_change_hash = save_suggested_changes
save_change_log(current_user,{comment: @comment, suggested_change_changes: suggested_change_change_hash, action_type: 'edit'})
@filter_querystring = remove_empty_elements(filter_params_all)
format.html { redirect_to edit_comment_path(@comment,@filter_querystring), notice: 'Comment was successfully updated.' }
format.json { render :show, status: :ok, location: @comment }
else
# Re-populate select-list data before re-rendering the edit form.
set_select_options
format.html { render :edit }
format.json { render json: @comment.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n json_update_and_sanitize(comment, comment_params, Comment)\n end",
"def update\n @comment = Comment.find(params[:comment_id])\n @comment.update(comment_params)\n render json: @comment\n end",
"def update\n user = User.find_by({token: env['HTTP_TOKEN']})\n comment = user.com... | [
"0.72262836",
"0.7067242",
"0.70644766",
"0.70267683",
"0.68851054",
"0.6778662",
"0.673702",
"0.67316365",
"0.6723994",
"0.6723994",
"0.66953856",
"0.66685724",
"0.6661655",
"0.6639885",
"0.66343516",
"0.66292053",
"0.66050345",
"0.65994745",
"0.6597625",
"0.6597625",
"0.659... | 0.0 | -1 |
Use callbacks to share common setup or constraints between actions. | def set_comment
  # before_action: load the comment scoped to the current rulemaking and
  # bail out with a redirect (HTML) or 204 (JSON) when the id is unknown.
  @comment = current_rulemaking.comments.find_by(id: params[:id])
  return unless @comment.nil?
  respond_to do |fmt|
    fmt.html { redirect_to comments_url, alert: "Comment #{params[:id]} was not found." }
    fmt.json { head :no_content }
  end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_... | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576"... | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def comment_params
# Strong-parameters whitelist for Comment create/update; attached_files
# permits an array of uploaded files.
params.require(:comment).permit(:source_id, :first_name, :last_name, :email, :organization, :state, :comment_text, :attachment_name, :attachment_url, :num_commenters, :summary, :comment_status_type_id, :notes, :manually_entered, :comment_data_source_id, attached_files: [])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n... | [
"0.6980957",
"0.6783065",
"0.6747844",
"0.6741468",
"0.67356336",
"0.6592548",
"0.65036845",
"0.64978707",
"0.64825076",
"0.64795035",
"0.64560914",
"0.64397955",
"0.6379666",
"0.6376688",
"0.6366702",
"0.6319728",
"0.6300833",
"0.6300629",
"0.6294277",
"0.6293905",
"0.629117... | 0.0 | -1 |
GET /election/new form for creating a new voting group | def new
# Blank group for the new-group form.
@group = LunchGroup.new
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @election = Election.new\n @election.choices.build\n @election.choices.each do |choice|\n choice.election_id = @election.id\n end\n respond_with @election\n\tend",
"def new\n @group = @authorized_group\n @candidate = @group.candidates.build \n respond_to do |format|\n ... | [
"0.74072677",
"0.6935537",
"0.6881596",
"0.6813523",
"0.67439014",
"0.67144424",
"0.6675272",
"0.66682315",
"0.66682315",
"0.66286516",
"0.66273093",
"0.6608924",
"0.6560848",
"0.6559121",
"0.65434533",
"0.6537528",
"0.65373564",
"0.6520924",
"0.64808667",
"0.6469617",
"0.642... | 0.0 | -1 |
POST /election actually creates a new voting group | def create
  # Create the voting group and its admin member inside one transaction.
  # Two fixes over the original:
  #  * `success` was first assigned inside the transaction block, making it
  #    block-local, so the later `if success` failed at runtime; it is now
  #    declared before the block.
  #  * the admin email was read from an undefined local `prefs`; it now
  #    comes from params[:prefs] (the same hash assigned to @group.prefs
  #    above) — NOTE(review): confirm against the form's field names.
  success = false
  LunchGroup.transaction do
    @group = LunchGroup.new(:name => params[:name])
    @group.prefs = params[:prefs]
    if success = @group.save
      @admin_user = GroupMember.new(:email => params[:prefs][:admin_email])
      @group.add_admin @admin_user
    end
  end
  if success
    flash[:notice] = "New group was created!"
    respond_to do |format|
      format.html { redirect_to :show }
      format.json { render json: { success: true } }
    end
  else
    flash[:notice] = "Please fix the errors."
    respond_to do |format|
      format.html { render :new }
      format.json { render json: { success: false, errors: @group.errors } }
    end
  end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @election = Election.new(election_params)\n respond_to do |format|\n if @election.save\n format.html { redirect_to [:admin, @election], notice: 'Election was successfully created.' }\n format.json { render :show, status: :created, location: @election }\n else\n for... | [
"0.6827538",
"0.6807363",
"0.6807363",
"0.6793103",
"0.6790869",
"0.66477555",
"0.66108686",
"0.65948427",
"0.6532868",
"0.64155406",
"0.63796175",
"0.6314158",
"0.62240416",
"0.6175115",
"0.6120723",
"0.608254",
"0.60743785",
"0.60743785",
"0.6041563",
"0.6004767",
"0.597924... | 0.0 | -1 |
GET /election/[group_id] form for editing an existing election group | def edit
# find_by_id returns nil (rather than raising) when the id is unknown.
@group = LunchGroup.find_by_id params[:id]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def edit\n @group = Group.find(params[:id])\n end",
"def edit\n @group = Group.find(params[:id])\n end",
"def edit\n @group = Group.find_by_id params[:id]\n end",
"def edit\n render partial: \"user_groups/form\",\n locals: { user_group: @user_group }\n end",
"def edit_group(... | [
"0.74375737",
"0.7437421",
"0.7422804",
"0.71681476",
"0.7107561",
"0.7107561",
"0.6987278",
"0.6885471",
"0.6861747",
"0.6794342",
"0.6785728",
"0.6591702",
"0.6539471",
"0.653162",
"0.64831245",
"0.64808255",
"0.6476432",
"0.6471809",
"0.6471157",
"0.6450893",
"0.6439387",
... | 0.72705966 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.