query
stringlengths
7
9.55k
document
stringlengths
10
363k
metadata
dict
negatives
listlengths
0
101
negative_scores
listlengths
0
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Show invalid properties with the reasons. Usually used together with valid?
def list_invalid_properties invalid_properties = Array.new invalid_properties end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil....
[ "0.76497203", "0.76497203", "0.76497203", "0.76497203", "0.7637422", "0.7637422", "0.7637422", "0.7637422", "0.7637422", "0.7637422", "0.7637422", "0.7637422", "0.7356452", "0.7334807", "0.72685325", "0.7238964", "0.7231359", "0.72258264", "0.7208294", "0.71760833" ]
0.7170241
93
Check to see if the all the properties in the model are valid
def valid? true end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_properties\n true\n end", "def validate_properties\n true\n end", "def validate\n super\n\n check_optional_property :collection, String\n check_optional_property :create, String\n check_optional_property :delete, String\n check_optional_property :...
[ "0.78992486", "0.78992486", "0.70971805", "0.70782334", "0.7032205", "0.7031276", "0.69510347", "0.6869891", "0.6858077", "0.6858077", "0.68287027", "0.6823878", "0.6820306", "0.68144894", "0.6794656", "0.6752167", "0.66843414", "0.6676546", "0.6667755", "0.66296124", "0.6618...
0.0
-1
Checks equality by comparing each attribute.
def ==(o) return true if self.equal?(o) self.class == o.class && year == o.year && make == o.make && model == o.model && trim == o.trim && short_trim == o.short_trim && body_type == o.body_type && body_subtype == o.body_subtype && vehicle_type == o.vehicle_type && transmission == o.transmission && drivetrain == o.drivetrain && fuel_type == o.fuel_type && engine == o.engine && engine_size == o.engine_size && engine_block == o.engine_block && doors == o.doors && cylinders == o.cylinders && made_in == o.made_in && steering_type == o.steering_type && antibrake_sys == o.antibrake_sys && tank_size == o.tank_size && overall_height == o.overall_height && overall_length == o.overall_length && overall_width == o.overall_width && std_seating == o.std_seating && opt_seating == o.opt_seating && highway_miles == o.highway_miles && city_miles == o.city_miles && engine_measure == o.engine_measure && engine_aspiration == o.engine_aspiration && trim_r == o.trim_r end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ==(other)\n attributes == other.attributes\n end", "def ==(other) # :nodoc:\n @attrs == other.attrs\n end", "def eql?(other)\n return true if self == other\n @@ATTRIBUTES.each do |att|\n return false unless self.send(att).eql?(other.send(att))\n end\n true\n en...
[ "0.7291717", "0.7188103", "0.70395297", "0.7007927", "0.68874705", "0.6861532", "0.6707156", "0.6660597", "0.66147524", "0.658478", "0.6584619", "0.6580019", "0.65543133", "0.6543933", "0.65068495", "0.6479513", "0.6456241", "0.6415999", "0.6412208", "0.6412208", "0.6412208",...
0.0
-1
Calculates hash code according to all attributes.
def hash [year, make, model, trim, short_trim, body_type, body_subtype, vehicle_type, transmission, drivetrain, fuel_type, engine, engine_size, engine_block, doors, cylinders, made_in, steering_type, antibrake_sys, tank_size, overall_height, overall_length, overall_width, std_seating, opt_seating, highway_miles, city_miles, engine_measure, engine_aspiration, trim_r].hash end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def attr_hash\n Digest::MD5.hexdigest(\"#{@name}:#{@ruby_type}\")\n end", "def hash() end", "def hash() end", "def hash() end", "def hash() end", "def hash() end", "def hash() end", "def hash() end", "def hash\n code = 17\n code = 37*code + @x.hash\n code = 37*code + @y.hash\n ...
[ "0.7118691", "0.70400536", "0.70400536", "0.70400536", "0.70400536", "0.70400536", "0.70400536", "0.70400536", "0.68960655", "0.67847186", "0.6707762", "0.670052", "0.6688737", "0.66705376", "0.6489735", "0.6462376", "0.6462376", "0.64444333", "0.6413127", "0.6395483", "0.638...
0.0
-1
Builds the object from hash
def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build(hash)\n obj = new\n hash.each_pair do |k,v|\n obj[k] = v if variables[k]\n end\n return obj\n end", "def build_from_hash(attributes)\n\n end", "def build_from_hash(hash)\n instance = self.new\n\n # Add the instance attributes dynamically ...
[ "0.8011074", "0.7470833", "0.7457607", "0.7256629", "0.72455454", "0.70060325", "0.6973257", "0.6955014", "0.69459796", "0.69398683", "0.69363195", "0.6917627", "0.6872358", "0.6796184", "0.6783521", "0.67575246", "0.67575246", "0.67560464", "0.67514306", "0.67136854", "0.666...
0.0
-1
Deserializes the data based on type
def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = Marketcheck_api_sdk.const_get(type).new temp_model.build_from_hash(value) end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s...
[ "0.7330926", "0.7274019", "0.72504056", "0.7245751", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", "0.72291344", ...
0.0
-1
Returns the string representation of the object
def to_s to_hash.to_s end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_s\n @object.to_s\n end", "def to_s\n object.to_s\n end", "def serialize(object)\n object.to_s\n end", "def to_s\n self.inspect\n end", "def to_s\n @string || @object.to_s('F')\n end", "def to_s\n @string || @object.to_s('F')\n end", "de...
[ "0.901024", "0.89506465", "0.84703195", "0.83409667", "0.8337169", "0.8337169", "0.8332247", "0.82546586", "0.8145818", "0.8144667", "0.81357557", "0.812714", "0.8093436", "0.8086725", "0.8073356", "0.8039774", "0.80308646", "0.80064154", "0.80064154", "0.80064154", "0.800641...
0.0
-1
to_body is an alias to to_hash (backward compatibility)
def to_body to_hash end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_body\r\n to_hash\r\n end", "def to_body\n to_hash\nend", "def to_body\n to_hash\nend" ]
[ "0.84283537", "0.8347048", "0.8347048" ]
0.0
-1
Returns the object in the form of hash
def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_hash\n object\n end", "def hash\r\n return to_s.hash\r\n end", "def hash\n to_a.hash\n end", "def hash\n [_hash, name, owner].hash\n end", "def hash\n return to_s.hash\n end", "def hash\n @hash\n end", "def hash\n @hash.hash\n end", "def hash\n ...
[ "0.8270299", "0.78767854", "0.78726953", "0.7802364", "0.7789188", "0.77806795", "0.7775915", "0.7767511", "0.7760525", "0.7760525", "0.77559966", "0.7731286", "0.7713916", "0.7713916", "0.7713916", "0.7713916", "0.7713916", "0.7713916", "0.7713916", "0.7713916", "0.7713916",...
0.0
-1
Outputs nonarray value in the form of hash For object, use to_hash. Otherwise, just return the value
def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hash\n [value].hash\n end", "def hash\n [value].hash\n end", "def hash\n\t\tvalue.hash\n\tend", "def hash\n value.hash\n end", "def hash\n @value.hash\n end", "def hash\r\n return to_s.hash\r\n end", "def to_hash\n @value\n end", "def to_hash\n @va...
[ "0.6718583", "0.6718583", "0.6669122", "0.66569644", "0.65872085", "0.64544505", "0.64143497", "0.64143497", "0.6380626", "0.63490635", "0.6302817", "0.62250805", "0.6151444", "0.6102206", "0.6081347", "0.6081347", "0.6073172", "0.60373986", "0.6020187", "0.5937615", "0.59016...
0.0
-1
This is the controller method responsible for displaying all the reviews for a share
def index @reviews = @share.reviews.order(priority: :asc) @reviewToAlter = @share.reviews.new end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n\t\t@reviews = @share.reviews.order(priority: :asc)\n\t\t@reviewToAlter = @share.reviews.new(review_params)\n\t\t\n\t\tif @reviewToAlter.save\n\t\t\tredirect_to share_reviews_path(@share), notice: \"Review saved!\"\n\t\telse\n\t\t\tflash[:alert] = \"Error : #{@reviewToAlter.errors.full_messages.to_sent...
[ "0.7431748", "0.74059606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7329606", "0.7202078", "0.7202078", "0.7161231", "0.711936", "0.7092843", "0.70846015", "0.70543146", ...
0.74671495
0
this method takes care of creating new reviews
def create @reviews = @share.reviews.order(priority: :asc) @reviewToAlter = @share.reviews.new(review_params) if @reviewToAlter.save redirect_to share_reviews_path(@share), notice: "Review saved!" else flash[:alert] = "Error : #{@reviewToAlter.errors.full_messages.to_sentence}" render :index end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n @review = Review.new(review_params)\n if @review.save\n redirect_to reviews_path\n else\n render 'new'\n end\n end", "def create\n review = course.reviews.new(review_params)\n \n if review.save\n render json: ReviewSerializer.new(review)...
[ "0.7899681", "0.78670037", "0.77011317", "0.766486", "0.7655177", "0.7631226", "0.762612", "0.76167107", "0.75981355", "0.758156", "0.75776124", "0.7572586", "0.7566595", "0.75486314", "0.75380796", "0.7514578", "0.74950445", "0.7490343", "0.7468508", "0.7468508", "0.7467554"...
0.7508563
16
edit is the method which takes care of editing a review
def edit @reviews = @share.reviews.order(priority: :asc) @reviewToAlter = Review.find(params[:id]) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def edit_review\n # client should be able to edit a review.\n puts \"Which review would you like to edit?\"\n pp Client.find_by(name: @@client_name).reviews\n puts \"Please enter the ID\"\n input = gets.strip\n selected_review = Review.find_by(id: input)\n puts...
[ "0.77618456", "0.768884", "0.76548994", "0.7574851", "0.7574851", "0.7574851", "0.7574851", "0.7574851", "0.7574851", "0.7574851", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434", "0.756434",...
0.81355745
0
update routine updates a modified review. If the review is successfully modified, User will be notified using flash
def update @reviews = @share.reviews.order(priority: :asc) @reviewToAlter = Review.find(params[:id]) @review = Review.find(params[:id]) if @review.update(review_params) redirect_to share_reviews_path(@share), notice: 'Review successfully updated!' else flash[:alert] = "Error : #{@review.errors.full_messages.to_sentence}" render :edit end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n\t\t\t\trender_invalid_action(current_review) unless current_review.update(update_driver_review_params)\n\t\t\tend", "def update\n @review = Review.find(params[:id])\n\n if @review.update_attributes(params[:review])\n\t\t\tredirect_to reviews_path, :notice => \"De waardering is aangepast.\"\n ...
[ "0.79077613", "0.7738259", "0.76770145", "0.7647901", "0.7647381", "0.75905484", "0.7584547", "0.75218093", "0.74996525", "0.74972767", "0.74942213", "0.74378693", "0.74378693", "0.74378693", "0.7428504", "0.7428504", "0.7428504", "0.7428504", "0.7428504", "0.7428504", "0.742...
0.7721352
2
destroy method takes care of deleting a review
def destroy @review = Review.find(params[:id]) @review.destroy redirect_to share_reviews_path(@share), notice: "Review successfully deleted!" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy\n @review.destroy\n end", "def destroy\n @review = find_review\n @review.destroy\n redirect_to reviews_path\n end", "def destroy\n @review.destroy\n\n head :no_content\n end", "def destroy\n @review.destroy\n\n head :no_content\n end", "def destroy\n @review.destr...
[ "0.8825942", "0.8384066", "0.83834416", "0.83834416", "0.83834416", "0.8317577", "0.8287302", "0.8250263", "0.8074591", "0.8063523", "0.8035887", "0.7986956", "0.79630625", "0.7929023", "0.79009676", "0.7885112", "0.7885112", "0.78683513", "0.7867553", "0.7865634", "0.7836517...
0.81108975
8
review_params lists all the permitted parameters for review table
def review_params params.require(:review).permit(:name, :priority, :comment) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def review_params\n parm = params.fetch(:review, {}).permit!\n p 'params', parm\n parm\n end", "def review_params\n # params.require(:review).permit(:membership_id, :user_id, :rating, :comments, :lti_app_id)\n end", "def review_params\n params.permit(:review, :body, :title, :revi...
[ "0.84528714", "0.7881275", "0.7713893", "0.7698338", "0.7690936", "0.7651478", "0.7588309", "0.757079", "0.75666517", "0.75534165", "0.7545849", "0.75402623", "0.75402623", "0.75402623", "0.7513782", "0.74998313", "0.7499796", "0.7496023", "0.74620646", "0.7461283", "0.745369...
0.7401983
32
Validate the constraint with the given name, which should have been added previously with NOT VALID.
def validate_constraint(name) @operations << {:op => :validate_constraint, :name => name} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_constraint(table, name)\n current_instructions << Instructions::ValidateConstraint.new(\n table: table,\n name: name,\n )\n end", "def validator_defined_for?(constraint_name)\n !!validator_for(constraint_name)\n end", "def name_invalid\n errors.add(:name, :unk...
[ "0.7206155", "0.6627902", "0.65585315", "0.6364898", "0.6355886", "0.6327348", "0.62545794", "0.6238708", "0.61284125", "0.60058", "0.60058", "0.5994172", "0.5992439", "0.5991104", "0.5974525", "0.59616953", "0.59547657", "0.5903176", "0.58597124", "0.5849842", "0.58259225", ...
0.7885657
0
The minimum value of the data type used in range partitions, useful as an argument to from.
def minvalue MINVALUE end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def min\r\n @range.min\r\n end", "def min\n @range.begin\n end", "def casted_minimum\n minimum.blank? ? nil : (qtype_name == \"decimal\" ? minimum : minimum.to_i)\n end", "def min\n return super if super.nil?\n (numeric_type == 'Integer') ? super.to_i : super.to_f\n end", "def min\n ...
[ "0.71220946", "0.70545757", "0.6893678", "0.67407", "0.6515799", "0.6494269", "0.64925396", "0.6426321", "0.64184254", "0.64184254", "0.6386808", "0.637547", "0.635593", "0.6348471", "0.6339401", "0.632692", "0.6290225", "0.62870634", "0.6271629", "0.6221216", "0.6221216", ...
0.67341924
4
The minimum value of the data type used in range partitions, useful as an argument to to.
def maxvalue MAXVALUE end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def casted_minimum\n minimum.blank? ? nil : (qtype_name == \"decimal\" ? minimum : minimum.to_i)\n end", "def min\n return super if super.nil?\n (numeric_type == 'Integer') ? super.to_i : super.to_f\n end", "def min\r\n @range.min\r\n end", "def minvalue\n MINVALUE\n end", "def m...
[ "0.7153883", "0.7048134", "0.67539436", "0.6592181", "0.65727943", "0.65612084", "0.6537592", "0.6452847", "0.6444002", "0.63955337", "0.63713956", "0.63656867", "0.6273183", "0.6239701", "0.62211186", "0.62211186", "0.62070656", "0.61946446", "0.6178495", "0.6156211", "0.611...
0.0
-1
Assumes range partitioning, sets the inclusive minimum value of the range for this partition.
def from(*v) @from = v end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end", "def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end", "def min\n @range.begin\n end", "def set_min(min)\n\n ...
[ "0.72577417", "0.72577417", "0.71424174", "0.7122371", "0.70912755", "0.69917667", "0.6888057", "0.6812013", "0.68096876", "0.6666901", "0.6399381", "0.6349612", "0.62588686", "0.6254483", "0.6244712", "0.62080765", "0.62066627", "0.61895996", "0.61192626", "0.61116827", "0.6...
0.0
-1
Assumes range partitioning, sets the exclusive maximum value of the range for this partition.
def to(*v) @to = v end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def max_range=(range)\n @max_range = [0, range].max\n @min_range = [@min_range, @max_range].min\n end", "def set_max( max )\n if IntegerOption.bounds_ok?( @min, max )\n @max = max\n else\n @max = nil\n raise \"invalid upper bound: #{ max.to_s }\"\n end\n end", "def max\r\n @r...
[ "0.7247103", "0.6731997", "0.6690845", "0.6576741", "0.65313303", "0.6524582", "0.6524582", "0.64506906", "0.63527775", "0.632476", "0.62763554", "0.6214056", "0.6212238", "0.6170544", "0.6155219", "0.61088014", "0.61088014", "0.61088014", "0.61088014", "0.61088014", "0.61088...
0.0
-1
Assumes list partitioning, sets the values to be included in this partition.
def values_in(*v) @in = v end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def partition_list()\nend", "def add!(partition, dirty = false)\n if @list[partition.name].nil?\n @list[partition.name] = partition\n else\n @list[partition.name].merge!(partition, simulate = false, dirty)\n end\n self\n end", "def set(values); end", "def set(partition, field, value)\n...
[ "0.5762416", "0.5667353", "0.566424", "0.5646707", "0.5622374", "0.5546669", "0.5455992", "0.5372267", "0.5271329", "0.525934", "0.5230752", "0.51615244", "0.51250994", "0.50517833", "0.505155", "0.5036472", "0.5034609", "0.5030784", "0.50031763", "0.50018364", "0.49964488", ...
0.0
-1
Assumes hash partitioning, sets the modulus for this parition.
def modulus(v) @modulus = v end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modulo(p0) end", "def modulus\n distance_to(origin)\n end", "def rehash(oldhash, size)\n (oldhash + 1) % size\n end", "def divmod(p0) end", "def partition_proposition(proposition)\n @remainder &= proposition\n @left &= proposition\n @right &= proposition\n ...
[ "0.6106353", "0.59064716", "0.58948857", "0.5822578", "0.57684463", "0.55961597", "0.55376786", "0.54671663", "0.5466879", "0.54487014", "0.5404215", "0.5349049", "0.53391105", "0.53174484", "0.526858", "0.526858", "0.5246383", "0.52278817", "0.5220499", "0.5183994", "0.51839...
0.7081045
0
Assumes hash partitioning, sets the remainder for this parition.
def remainder(v) @remainder = v end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def partition_proposition(proposition)\n @remainder &= proposition\n @left &= proposition\n @right &= proposition\n end", "def rehash(oldhash, size)\n (oldhash + 1) % size\n end", "def set_partition\n @partition = Partition.find(params[:id])\n end", "def partition(i...
[ "0.65524477", "0.62586033", "0.56729954", "0.5489327", "0.54774976", "0.5430847", "0.5423311", "0.54208827", "0.5387516", "0.53596234", "0.5349726", "0.5324894", "0.53132087", "0.5308499", "0.52947605", "0.5264948", "0.52567273", "0.52390057", "0.52349293", "0.522618", "0.520...
0.5119667
27
Sets that this is a default partition, where values not in other partitions are stored.
def default @default = true end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_default_cluster\n if study.default_options[:cluster].nil?\n cluster = study.cluster_groups.by_name(cluster_name_by_file_type)\n study.default_options[:cluster] = cluster.name if cluster.present?\n end\n end", "def reset_default_cluster(study:)\n if study.cluster_groups.by_name(study.d...
[ "0.61621904", "0.5887786", "0.5762772", "0.5762772", "0.5732178", "0.5605029", "0.5546228", "0.5460707", "0.5460707", "0.54267246", "0.5426431", "0.5386847", "0.53852624", "0.5381668", "0.53813004", "0.5363579", "0.5362803", "0.53579223", "0.5354793", "0.5327952", "0.53276473...
0.5201624
31
The from and to values of this partition for a range partition.
def range [@from, @to] end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def range\n (@from..@to)\n end", "def range\n @from..@to\n end", "def collect\n [@range_low, @range_high]\n end", "def range\n\t\t\t#(@end > @start) ? (@start .. @end) : (@end .. @start)\n\t\t\t(start .. self.end)\n\t\tend", "def ranges\n attributes.fetch(:ranges)\n ...
[ "0.7436858", "0.72955906", "0.72890246", "0.69145817", "0.6898283", "0.68982786", "0.68723494", "0.68503475", "0.6847249", "0.6791602", "0.6756868", "0.6735238", "0.6723739", "0.6713718", "0.6713718", "0.6713718", "0.6713718", "0.66967636", "0.6638313", "0.66310805", "0.65523...
0.81356955
0
The values to include in this partition for a list partition.
def list @in end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetPartitionList\n deep_copy(@partition_info)\n end", "def partition_list()\nend", "def sites\n @list.map {|partition_name, partition| partition.sites.map {|site| {site => partition_name} }}.flatten(1)\n end", "def all\n @list.values\n end", "def values\n @values ||= []\n ...
[ "0.6419158", "0.5873987", "0.5651121", "0.5589368", "0.5561751", "0.5551433", "0.554591", "0.5534171", "0.5534171", "0.5534171", "0.5534171", "0.5534171", "0.5534171", "0.5534171", "0.55264807", "0.5521307", "0.54976195", "0.54662544", "0.5450633", "0.54149896", "0.5413755", ...
0.48294732
88
The modulus and remainder to use for this partition for a hash partition.
def hash_values [@modulus, @remainder] end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modulo(p0) end", "def modulus(v)\n @modulus = v\n end", "def remainder(val); end", "def modulo(other)\n self % other\n end", "def remainder(p0) end", "def modulus\n distance_to(origin)\n end", "def divmod(p0) end", "def mod(first_number, second_number)\n first_number % se...
[ "0.6920085", "0.6797567", "0.674929", "0.6711626", "0.66135556", "0.6557639", "0.6499426", "0.6435647", "0.64005387", "0.6293782", "0.61620873", "0.61104375", "0.60770786", "0.60770786", "0.6060377", "0.6050155", "0.6028834", "0.60186404", "0.6014417", "0.5994006", "0.5962720...
0.6458688
7
Determine the appropriate partition type for this partition by which methods were called on it.
def partition_type raise Error, "Unable to determine partition type, multiple different partitioning methods called" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1 if @from || @to raise Error, "must call both from and to when creating a partition of a table if calling either" unless @from && @to :range elsif @in :list elsif @modulus || @remainder raise Error, "must call both modulus and remainder when creating a partition of a table if calling either" unless @modulus && @remainder :hash elsif @default :default else raise Error, "unable to determine partition type, no partitioning methods called" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def IsPartitionable(entry)\n entry = deep_copy(entry)\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMRAID ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMMULTIPATH ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_MDPART ||\n IsRealDisk(entry)\n end", ...
[ "0.554963", "0.54393864", "0.54320496", "0.53941506", "0.5306165", "0.5259705", "0.52464676", "0.52419007", "0.52418816", "0.5201963", "0.51878047", "0.5146065", "0.51062053", "0.51016784", "0.507593", "0.5063504", "0.5013748", "0.50104105", "0.4990257", "0.49901363", "0.4987...
0.74603456
0
Set a conversion proc for the given oid. The callable can be passed either as a argument or a block.
def add_conversion_proc(oid, callable=nil, &block) conversion_procs[oid] = callable || block end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_object_conversion( oid, conversion=nil )\n\t\tconversion = Proc.new if block_given?\n\t\t@object_conversions[ oid ] = conversion\n\tend", "def add_attribute_conversion( oid, conversion=nil )\n\t\tconversion = Proc.new if block_given?\n\t\t@attribute_conversions[ oid ] = conversion\n\tend", "def to_proc...
[ "0.69231147", "0.65239", "0.63744646", "0.6109321", "0.6109321", "0.6109321", "0.6096786", "0.6083638", "0.599398", "0.599398", "0.5993846", "0.5963998", "0.5879365", "0.5862984", "0.5833793", "0.5791118", "0.5768658", "0.56523377", "0.56225663", "0.5618568", "0.5593497", "...
0.8385225
0
Add a conversion proc for a named type, using the given block. This should be used for types without fixed OIDs, which includes all types that are not included in a default PostgreSQL installation.
def add_named_conversion_proc(name, &block) unless oid = from(:pg_type).where(:typtype=>['b', 'e'], :typname=>name.to_s).get(:oid) raise Error, "No matching type in pg_type for #{name.inspect}" end add_conversion_proc(oid, block) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def define_type_convert(type, &block)\n @conversions ||= {}\n @conversions[type] = block\n end", "def register_type(type, &block)\n register CastWhenType.new(type, &block)\n end", "def cast_with_proc(obj, type)\n if (type == :proc || obj.is_a?(::Proc)) && obj.respond_to?(:to_source)\n...
[ "0.69109946", "0.6309396", "0.5923431", "0.5917636", "0.5862878", "0.57383573", "0.5684644", "0.55541337", "0.5495757", "0.54407096", "0.54395115", "0.54209137", "0.53729755", "0.53537726", "0.5343672", "0.533057", "0.523242", "0.5228982", "0.5200437", "0.5194037", "0.5184918...
0.8404523
0
A hash of metadata for CHECK constraints on the table. Keys are CHECK constraint name symbols. Values are hashes with the following keys: :definition :: An SQL fragment for the definition of the constraint :columns :: An array of column symbols for the columns referenced in the constraint, can be an empty array if the database cannot deteremine the column symbols.
def check_constraints(table) m = output_identifier_meth hash = {} _check_constraints_ds.where_each(:conrelid=>regclass_oid(table)) do |row| constraint = m.call(row[:constraint]) entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]} entry[:columns] << m.call(row[:column]) if row[:column] end hash end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def constraint_definition_sql(constraint)\n sql = String.new\n sql << \"CONSTRAINT #{quote_identifier(constraint[:name])} \" if constraint[:name] \n case constraint[:type]\n when :check\n check = constraint[:check]\n check = check.first if check.is_a?(Array) && check.length == 1\n...
[ "0.69810116", "0.69258285", "0.6856217", "0.67866033", "0.6391585", "0.62114024", "0.6107531", "0.6050771", "0.6030892", "0.5981253", "0.5973716", "0.591812", "0.58696294", "0.5748742", "0.5745037", "0.57010984", "0.56576616", "0.5642085", "0.5629582", "0.5566976", "0.5486948...
0.7181034
0
Convert the first primary key column in the +table+ from being a serial column to being an identity column. If the column is already an identity column, assume it was already converted and make no changes. Only supported on PostgreSQL 10.2+, since on those versions Sequel will use identity columns instead of serial columns for auto incrementing primary keys. Only supported when running as a superuser, since regular users cannot modify system tables, and there is no way to keep an existing sequence when changing an existing column to be an identity column. This method can raise an exception in at least the following cases where it may otherwise succeed (there may be additional cases not listed here): The serial column was added after table creation using PostgreSQL <7.3 A regular index also exists on the column (such an index can probably be dropped as the primary key index should suffice) Options: :column :: Specify the column to convert instead of using the first primary key column :server :: Run the SQL on the given server
# Convert the first serial primary key column in +table+ into an
# identity column.  No-op if the column is already an identity column.
# Requires PostgreSQL 10.2+ and superuser privileges, since the
# conversion edits the pg_depend/pg_attribute system catalogs directly.
def convert_serial_to_identity(table, opts=OPTS)
  raise Error, "convert_serial_to_identity is only supported on PostgreSQL 10.2+" unless server_version >= 100002

  server = opts[:server]
  server_hash = server ? {:server=>server} : OPTS
  ds = dataset
  ds = ds.server(server) if server

  # Catalog updates below require superuser permissions.
  raise Error, "convert_serial_to_identity requires superuser permissions" unless ds.get{current_setting('is_superuser')} == 'on'

  table_oid = regclass_oid(table)
  im = input_identifier_meth
  # Use the :column option, or default to the first auto-incrementing
  # primary key column found in the cached schema.
  unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0]))
    raise Error, "could not determine column to convert from serial to identity automatically"
  end
  column = im.call(column)

  column_num = ds.from(:pg_attribute).
    where(:attrelid=>table_oid, :attname=>column).
    get(:attnum)

  pg_class = Sequel.cast('pg_class', :regclass)
  # Find the sequence owned by ('a') or internally linked to ('i') the
  # column.  deptype 'i' indicates the column is already an identity
  # column.
  res = ds.from(:pg_depend).
    where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i').
    select_map([:objid, Sequel.as({:deptype=>'i'}, :v)])

  case res.length
  when 0
    raise Error, "unable to find related sequence when converting serial to identity"
  when 1
    seq_oid, already_identity = res.first
  else
    raise Error, "more than one linked sequence found when converting serial to identity"
  end

  # Assume a previous conversion already made this an identity column.
  return if already_identity

  transaction(server_hash) do
    # Drop the serial default, mark the sequence dependency as internal,
    # and flag the column as GENERATED BY DEFAULT AS IDENTITY ('d').
    run("ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT", server_hash)
    ds.from(:pg_depend).
      where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a').
      update(:deptype=>'i')
    ds.from(:pg_attribute).
      where(:attrelid=>table_oid, :attname=>column).
      update(:attidentity=>'d')
  end

  # Invalidate the cached schema so the change is visible to callers.
  remove_cached_schema(table)
  nil
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_se...
[ "0.6523442", "0.6269975", "0.6269304", "0.6203986", "0.6120233", "0.6039965", "0.60150903", "0.5958063", "0.5902873", "0.58989686", "0.5894372", "0.58397007", "0.5799071", "0.57580894", "0.57580894", "0.5664597", "0.5664597", "0.56134826", "0.56110376", "0.56110376", "0.56054...
0.8096805
0
Creates the function in the database. Arguments: name :: name of the function to create definition :: string definition of the function, or object file for a dynamically loaded C function. opts :: options hash: :args :: function arguments, can be either a symbol or string specifying a type or an array of 1-3 elements: 1 :: argument data type 2 :: argument name 3 :: argument mode (e.g. in, out, inout) :behavior :: Should be IMMUTABLE, STABLE, or VOLATILE. PostgreSQL assumes VOLATILE by default. :parallel :: The thread safety attribute of the function. Should be SAFE, UNSAFE, RESTRICTED. PostgreSQL assumes UNSAFE by default. :cost :: The estimated cost of the function, used by the query planner. :language :: The language the function uses. SQL is the default. :link_symbol :: For a dynamically loaded C function, the function's link symbol if different from the definition argument. :returns :: The data type returned by the function. If you are using OUT or INOUT argument modes, this is ignored. Otherwise, if this is not specified, void is used by default to specify the function is not supposed to return a value. :rows :: The estimated number of rows the function will return. Only use if the function returns SETOF something. :security_definer :: Makes the privileges of the function the same as the privileges of the user who defined the function instead of the privileges of the user who runs the function. There are security implications when doing this, see the PostgreSQL documentation. :set :: Configuration variables to set while the function is being run, can be a hash or an array of two pairs. search_path is often used here if :security_definer is used. :strict :: Makes the function return NULL when any argument is NULL.
# Create a function in the database by executing the generated
# CREATE FUNCTION statement.  See create_function_sql for the
# supported options.
def create_function(name, definition, opts=OPTS)
  sql = create_function_sql(name, definition, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n ...
[ "0.7315047", "0.70606333", "0.66673636", "0.6550817", "0.64636093", "0.6045642", "0.58598155", "0.5743289", "0.55446565", "0.550021", "0.5414134", "0.5371125", "0.53436047", "0.5325446", "0.5200718", "0.5181034", "0.5116383", "0.50801814", "0.5057106", "0.50351334", "0.502805...
0.7249979
1
Create the procedural language in the database. Arguments: name :: Name of the procedural language (e.g. plpgsql) opts :: options hash: :handler :: The name of a previously registered function used as a call handler for this language. :replace :: Replace the installed language if it already exists (on PostgreSQL 9.0+). :trusted :: Marks the language being created as trusted, allowing unprivileged users to create functions using this language. :validator :: The name of previously registered function used as a validator of functions defined in this language.
# Create a procedural language in the database by executing the
# generated CREATE LANGUAGE statement.
def create_language(name, opts=OPTS)
  sql = create_language_sql(name, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end", "def create_proc(name,...
[ "0.7619942", "0.6248246", "0.62357014", "0.6163669", "0.584984", "0.5509536", "0.53380984", "0.5277645", "0.5253506", "0.52450913", "0.51921475", "0.5182267", "0.51447105", "0.50993854", "0.50829625", "0.5075793", "0.5061333", "0.5004424", "0.5002058", "0.5002025", "0.4987358...
0.7850673
0
Create a schema in the database. Arguments: name :: Name of the schema (e.g. admin) opts :: options hash: :if_not_exists :: Don't raise an error if the schema already exists (PostgreSQL 9.3+) :owner :: The owner to set for the schema (defaults to current user if not specified)
# Create a schema in the database by executing the generated
# CREATE SCHEMA statement.
def create_schema(name, opts=OPTS)
  sql = create_schema_sql(name, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end", "def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end...
[ "0.7882215", "0.76678604", "0.76181155", "0.73712593", "0.73512733", "0.7165824", "0.70954925", "0.69713676", "0.6500826", "0.6297565", "0.62504894", "0.61846215", "0.6069834", "0.6034471", "0.59912986", "0.59591717", "0.5923187", "0.5832113", "0.5630911", "0.56262213", "0.55...
0.76356405
2
Support partitions of tables using the :partition_of option.
# Support creating a partition of an existing partitioned table via
# the :partition_of option; all other cases are handled by the
# generic implementation.
def create_table(name, options=OPTS, &block)
  if options[:partition_of]
    create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)
    return
  end
  super
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def partitioned_tables\n PartitionedTables.new(connection).all\n end", "def create_partition_of_table_sql(name, generator, options)\n sql = create_table_prefix_sql(name, options).dup\n\n sql << \" PARTITION OF #{quote_schema_table(options[:partition_of])}\"\n\n case generator.par...
[ "0.73846465", "0.6675539", "0.6539488", "0.64836276", "0.62881833", "0.62800586", "0.6146445", "0.61269706", "0.591312", "0.58228123", "0.5668915", "0.5609678", "0.5595541", "0.5555945", "0.551449", "0.5510789", "0.54816824", "0.5454161", "0.5439815", "0.5434736", "0.54189837...
0.64252746
4
Support partitions of tables using the :partition_of option.
# Support creating a partition of an existing partitioned table via
# the :partition_of option.  Like create_table, but does not raise an
# error if the table already exists.
def create_table?(name, options=OPTS, &block)
  if options[:partition_of]
    # Use a non-mutating merge so the caller's options hash is not
    # modified (merge! would also raise FrozenError for frozen hashes).
    create_table(name, options.merge(:if_not_exists=>true), &block)
    return
  end
  super
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def partitioned_tables\n PartitionedTables.new(connection).all\n end", "def create_partition_of_table_sql(name, generator, options)\n sql = create_table_prefix_sql(name, options).dup\n\n sql << \" PARTITION OF #{quote_schema_table(options[:partition_of])}\"\n\n case generator.par...
[ "0.73860645", "0.6674913", "0.65416634", "0.64847636", "0.6425398", "0.6288182", "0.62805045", "0.6146301", "0.6127633", "0.5911713", "0.5668943", "0.5609965", "0.55949575", "0.55547947", "0.55140114", "0.5512072", "0.5481167", "0.5452773", "0.54395694", "0.5434945", "0.54194...
0.58224213
10
Create a trigger in the database. Arguments: table :: the table on which this trigger operates name :: the name of this trigger function :: the function to call for this trigger, which should return type trigger. opts :: options hash: :after :: Calls the trigger after execution instead of before. :args :: An argument or array of arguments to pass to the function. :each_row :: Calls the trigger for each row instead of for each statement. :events :: Can be :insert, :update, :delete, or an array of any of those. Calls the trigger whenever that type of statement is used. By default, the trigger is called for insert, update, or delete. :replace :: Replace the trigger with the same name if it already exists (PostgreSQL 14+). :when :: A filter to use for the trigger
# Create a trigger on the given table by executing the generated
# CREATE TRIGGER statement.  See create_trigger_sql for the
# supported options.
def create_trigger(table, name, function, opts=OPTS)
  sql = create_trigger_sql(table, name, function, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unles...
[ "0.75036204", "0.71252567", "0.71252567", "0.6725345", "0.63516825", "0.6272723", "0.59450215", "0.5315169", "0.51494783", "0.5082", "0.506205", "0.5035983", "0.50323826", "0.49635643", "0.49058893", "0.48846143", "0.4867852", "0.48573887", "0.48499548", "0.4799055", "0.47917...
0.77001125
0
Use PostgreSQL's DO syntax to execute an anonymous code block. The code should be the literal code string to use in the underlying procedural language. Options: :language :: The procedural language the code is written in. The PostgreSQL default is plpgsql. Can be specified as a string or a symbol.
# Execute an anonymous code block via PostgreSQL's DO statement.
# The :language option selects the procedural language (the server
# defaults to plpgsql when it is omitted).
def do(code, opts=OPTS)
  sql = String.new("DO ")
  if language = opts[:language]
    sql << "LANGUAGE #{literal(language.to_s)} "
  end
  sql << literal(code)
  run sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def block_code(code, language)\n code\n end", "def code(lang, &block)\n source_code = capture(&block).strip\n eval('_erbout', block.binding).concat %Q(<pre class=\"CodeRay\"><code class=\"language-#{lang}\">#{html_escape source_code}</code></pre>)\nend", "def block_code(code, language)\n if ...
[ "0.6247023", "0.5933642", "0.5895321", "0.5683034", "0.56672835", "0.5577568", "0.5519545", "0.5435928", "0.54297894", "0.5415428", "0.5415428", "0.5401936", "0.5354501", "0.5351753", "0.5335078", "0.5329581", "0.53250164", "0.530187", "0.5296145", "0.5272397", "0.5240748", ...
0.66809785
0
Drops the function from the database. Arguments: name :: name of the function to drop opts :: options hash: :args :: The arguments for the function. See create_function_sql. :cascade :: Drop other objects depending on this function. :if_exists :: Don't raise an error if the function doesn't exist.
# Drop a function from the database by executing the generated
# DROP FUNCTION statement.
def drop_function(name, opts=OPTS)
  sql = drop_function_sql(name, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end", "def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(co...
[ "0.79073864", "0.786958", "0.7367995", "0.7303213", "0.71446484", "0.71446484", "0.7094034", "0.70206577", "0.5780303", "0.5701886", "0.56950027", "0.56453824", "0.5614687", "0.5581923", "0.5575382", "0.55501544", "0.54821366", "0.54505163", "0.54489833", "0.54471713", "0.542...
0.74234545
2
Drops a procedural language from the database. Arguments: name :: name of the procedural language to drop opts :: options hash: :cascade :: Drop other objects depending on this function. :if_exists :: Don't raise an error if the function doesn't exist.
# Drop a procedural language from the database by executing the
# generated DROP LANGUAGE statement.
def drop_language(name, opts=OPTS)
  sql = drop_language_sql(name, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end", "def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts...
[ "0.771682", "0.62814724", "0.6009372", "0.6009372", "0.5947735", "0.5887368", "0.58383554", "0.58220875", "0.57442856", "0.5699863", "0.56750304", "0.56655055", "0.5662529", "0.562056", "0.5534475", "0.551798", "0.54874253", "0.54649365", "0.5445987", "0.54157716", "0.5367591...
0.71211535
1
Drops a schema from the database. Arguments: name :: name of the schema to drop opts :: options hash: :cascade :: Drop all objects in this schema. :if_exists :: Don't raise an error if the schema doesn't exist.
# Drop a schema from the database by executing the generated
# DROP SCHEMA statement.
def drop_schema(name, opts=OPTS)
  sql = drop_schema_sql(name, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end", "def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opt...
[ "0.8012164", "0.78485155", "0.78375214", "0.7437326", "0.7366318", "0.72286713", "0.7122654", "0.702088", "0.69215715", "0.672576", "0.661924", "0.6551986", "0.6446336", "0.6397896", "0.63680106", "0.629387", "0.62173814", "0.6208451", "0.6206428", "0.6178284", "0.6169206", ...
0.7423064
4
Drops a trigger from the database. Arguments: table :: table from which to drop the trigger name :: name of the trigger to drop opts :: options hash: :cascade :: Drop other objects depending on this function. :if_exists :: Don't raise an error if the function doesn't exist.
# Drop a trigger from the given table by executing the generated
# DROP TRIGGER statement.
def drop_trigger(table, name, opts=OPTS)
  sql = drop_trigger_sql(table, name, opts)
  self << sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n ...
[ "0.7669092", "0.7631279", "0.7302634", "0.7302634", "0.69826895", "0.670343", "0.66852194", "0.6222981", "0.6037891", "0.5920438", "0.57999533", "0.5695673", "0.5686736", "0.5677208", "0.56751347", "0.56388307", "0.56238425", "0.5610893", "0.5580581", "0.555721", "0.55252904"...
0.7433689
2
Return full foreign key information using the pg system tables, including :name, :on_delete, :on_update, and :deferrable entries in the hashes. Supports additional options: :reverse :: Instead of returning foreign keys in the current table, return foreign keys in other tables that reference the current table. :schema :: Set to true to have the :table value in the hashes be a qualified identifier. Set to false to use a separate :schema value with the related schema. Defaults to whether the given table argument is a qualified identifier.
# Return an array of hashes describing foreign keys, using the pg_*
# system tables.  Each hash includes :name, :columns, :key, :on_update,
# :on_delete, :deferrable, and :table entries.  With :reverse, list
# foreign keys in other tables that reference this table instead.
def foreign_key_list(table, opts=OPTS)
  m = output_identifier_meth
  schema, _ = opts.fetch(:schema, schema_and_table(table))
  h = {}
  fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP
  reverse = opts[:reverse]

  (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|
    # In reverse mode, the same constraint name can appear in multiple
    # referencing tables, so key entries by schema/table/name.
    if reverse
      key = [row[:schema], row[:table], row[:name]]
    else
      key = row[:name]
    end

    # Multi-column foreign keys produce one row per column pair, so
    # append to an existing entry when one is present.
    if r = h[key]
      r[:columns] << m.call(row[:column])
      r[:key] << m.call(row[:refcolumn])
    else
      entry = h[key] = {
        :name=>m.call(row[:name]),
        :columns=>[m.call(row[:column])],
        :key=>[m.call(row[:refcolumn])],
        :on_update=>fklod_map[row[:on_update]],
        :on_delete=>fklod_map[row[:on_delete]],
        :deferrable=>row[:deferrable],
        :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),
      }

      unless schema
        # If not combining schema information into the :table entry
        # include it as a separate entry.
        entry[:schema] = m.call(row[:schema])
      end
    end
  end

  h.values
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dump_table_foreign_keys(table, options=OPTS)\n if supports_foreign_key_parsing?\n fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns]}\n end\n\n if fks.nil? || fks.empty?\n ''\n else\n dump_add_fk_constraints(table, fks)\n end\n end", "def foreign...
[ "0.72586703", "0.70443857", "0.7040204", "0.65597934", "0.64049715", "0.6336679", "0.6077278", "0.60334164", "0.60242915", "0.60242915", "0.60122025", "0.5995262", "0.5981016", "0.5927055", "0.5910303", "0.5858759", "0.58397806", "0.5837277", "0.5833357", "0.5808299", "0.5792...
0.7318138
0
Use the pg_ system tables to determine indexes on a table
# Return a hash of indexes on the given table, determined from the
# pg_* system tables.  Keys are index names; values contain :columns,
# :unique, and :deferrable entries.  Partial indexes are excluded
# unless the :include_partial option is given.
def indexes(table, opts=OPTS)
  ident = output_identifier_meth
  filter = {Sequel[:tab][:oid]=>regclass_oid(table, opts)}
  filter[:indpred] = nil unless opts[:include_partial]
  result = {}
  _indexes_ds.where_each(filter) do |row|
    entry = (result[ident.call(row[:name])] ||= {:columns=>[], :unique=>row[:unique], :deferrable=>row[:deferrable]})
    entry[:columns] << ident.call(row[:column])
  end
  result
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def indexes(table_name, name = nil)\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n \n # Changed from upstread: link to pg_am to grab the index type (e.g. \"gist\")\n result = query(<<-SQL, name)\n SELECT distinct i.relname, d.indisunique, d.indkey, t.oid, am.amname...
[ "0.78263485", "0.7813025", "0.7794974", "0.7794034", "0.7673546", "0.76150733", "0.7565812", "0.75639427", "0.7554696", "0.7554696", "0.7390897", "0.7339004", "0.7273254", "0.7258199", "0.72453284", "0.71867275", "0.7170862", "0.714218", "0.704027", "0.7033335", "0.7005751", ...
0.6592221
29
Dataset containing all current database locks
# Return a dataset listing all current database locks, joining
# pg_locks to pg_class so each row includes the relation name.
def locks
  ds = dataset.from(:pg_class).join(:pg_locks, :relation=>:relfilenode)
  ds.select{[pg_class[:relname], Sequel::SQL::ColumnAll.new(:pg_locks)]}
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def locks\n sql = %q(\n select\n pg_stat_activity.procpid,\n pg_class.relname,\n pg_locks.transactionid,\n pg_locks.granted,\n substr(pg_stat_activity.current_query,1,30) as query_snippet,\n age(now(),pg_stat_activity.query_start) as \"age\"\n from pg_stat_activity,pg_locks left\n ...
[ "0.6448412", "0.63300234", "0.6222221", "0.5978553", "0.5910206", "0.58808726", "0.57571405", "0.5666715", "0.56507355", "0.56354165", "0.5576385", "0.5576385", "0.5563828", "0.55490714", "0.55186725", "0.54743737", "0.5468267", "0.54595023", "0.54589605", "0.5430492", "0.540...
0.80763364
0
Notifies the given channel. See the PostgreSQL NOTIFY documentation. Options: :payload :: The payload string to use for the NOTIFY statement. Only supported in PostgreSQL 9.0+. :server :: The server to which to send the NOTIFY statement, if the sharding support is being used.
# Issue a NOTIFY statement for the given channel, with an optional
# :payload string.  Honors the :server option via execute_ddl when
# sharding is in use.
def notify(channel, opts=OPTS)
  sql = String.new("NOTIFY ")
  dataset.send(:identifier_append, sql, channel)
  if payload = opts[:payload]
    sql << ", "
    dataset.literal_append(sql, payload.to_s)
  end
  execute_ddl(sql, opts)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def notify_channel(connection, channel, payload)\n connection.execute(\"NOTIFY #{channel}, #{payload}\")\n end", "def notify msg, channel \n if @slack_url == '' \n puts \"No slack webhook url specified\"\n return\n end\n \n hash = {:text => msg, :channel => channel}\n json = JSON.gener...
[ "0.8064743", "0.57137465", "0.5685415", "0.56356466", "0.5602777", "0.55876213", "0.54835033", "0.5469714", "0.5420852", "0.5420809", "0.5384294", "0.5377497", "0.53681254", "0.53485763", "0.5333337", "0.5321088", "0.5249252", "0.523603", "0.5228612", "0.52157784", "0.5215308...
0.77982366
1
Return primary key for the given table.
# Return the primary key column for the given table, caching the
# result per quoted table name under Sequel.synchronize.
def primary_key(table, opts=OPTS)
  cache_key = quote_schema_table(table)
  Sequel.synchronize{return @primary_keys[cache_key] if @primary_keys.has_key?(cache_key)}
  pk = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))
  Sequel.synchronize{@primary_keys[cache_key] = pk}
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end", "def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end", "def p...
[ "0.8734996", "0.8696134", "0.85477465", "0.8429715", "0.8329904", "0.83237815", "0.8244794", "0.8160607", "0.7980489", "0.7939842", "0.7795205", "0.7681589", "0.76727057", "0.7614512", "0.75594294", "0.7505976", "0.74860096", "0.7477005", "0.7477005", "0.7330574", "0.7314233"...
0.80371535
8
Return the sequence providing the default for the primary key for the given table.
# Return the literal SQL for the sequence providing the default value
# of the table's primary key, or nil if there is none.  Results are
# cached per quoted table name; nil results are not cached.
def primary_key_sequence(table, opts=OPTS)
  quoted_table = quote_schema_table(table)
  Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}
  cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}
  # Check for a serial-style sequence first, then fall back to a
  # custom sequence default (treated as a literal string).
  value = if pks = _select_serial_sequence_ds.first(cond)
    literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))
  elsif pks = _select_custom_sequence_ds.first(cond)
    literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))
  end
  Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def default_sequence_name(table_name, pk = nil) #:nodoc:\n default_pk, default_seq = pk_and_sequence_for(table_name)\n default_seq || \"#{table_name}_#{pk || default_pk || 'id'}_seq\"\n end", "def default_sequence_name(table_name, pri_key = nil)\n serial_sequence(table_name, pri_key || ...
[ "0.8170994", "0.81505257", "0.8027529", "0.7910902", "0.78652835", "0.78608876", "0.78294235", "0.74895513", "0.7457443", "0.7406263", "0.7299918", "0.7291324", "0.72609276", "0.72075987", "0.7121409", "0.70861423", "0.7080454", "0.69758296", "0.6911338", "0.680072", "0.67957...
0.7384305
10
Refresh the materialized view with the given name. DB.refresh_view(:items_view) REFRESH MATERIALIZED VIEW items_view DB.refresh_view(:items_view, concurrently: true) REFRESH MATERIALIZED VIEW CONCURRENTLY items_view
# Refresh the materialized view with the given name, optionally with
# the CONCURRENTLY modifier when the :concurrently option is given.
def refresh_view(name, opts=OPTS)
  sql = String.new("REFRESH MATERIALIZED VIEW ")
  sql << "CONCURRENTLY " if opts[:concurrently]
  sql << quote_schema_table(name)
  run sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def refresh_materialized_view(name, options = {})\n options = {\n :with_data => true\n }.merge(options)\n\n sql = \"REFRESH MATERIALIZED VIEW #{quote_view_name(name)}\"\n sql << \" WITH NO DATA\" unless options[:with_data]\n\n execute \"#{sql};\"\n end", "def refr...
[ "0.7993562", "0.7882737", "0.75135463", "0.7201716", "0.6636582", "0.643207", "0.6383927", "0.6365476", "0.6278349", "0.610675", "0.60786706", "0.6030206", "0.59427327", "0.58854914", "0.58234787", "0.5808679", "0.5808097", "0.5795441", "0.5790887", "0.5762876", "0.5745043", ...
0.8195975
0
Reset the primary key sequence for the given table, basing it on the maximum current value of the table's primary key.
# Reset the primary key sequence for the given table, setting it to
# the maximum current value of the primary key plus the sequence
# increment (or the sequence minimum if the table is empty).
# No-op if the table has no associated primary key sequence.
def reset_primary_key_sequence(table)
  return unless seq = primary_key_sequence(table)
  pk = SQL::Identifier.new(primary_key(table))
  db = self
  s, t = schema_and_table(table)
  table = Sequel.qualify(s, t) if s

  # PostgreSQL 10+ exposes sequence metadata via the pg_sequence
  # catalog; older versions require selecting from the sequence itself.
  if server_version >= 100000
    seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq)))
    increment_by = :seqincrement
    min_value = :seqmin
  # :nocov:
  else
    seq_ds = metadata_dataset.from(LiteralString.new(seq))
    increment_by = :increment_by
    min_value = :min_value
  # :nocov:
  end

  # setval(..., false) makes the next nextval return the given value.
  get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)}
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_pk_sequence!(table, pk = nil, sequence = nil)\n if ! pk || ! sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk; sequence ||= default_sequence\n end\n if pk && sequence\n quoted_sequence = quote_column_name(sequence)\n\n sel...
[ "0.81005555", "0.80798966", "0.80725735", "0.80469537", "0.80416894", "0.7450408", "0.74204576", "0.7341868", "0.72347623", "0.71692264", "0.7104913", "0.7014823", "0.6871635", "0.66317236", "0.66317236", "0.66184896", "0.6581778", "0.65705836", "0.6544491", "0.6426742", "0.6...
0.8832561
0
PostgreSQL uses the SERIAL pseudotype instead of AUTOINCREMENT for managing incrementing primary keys.
# Options hash used for auto-incrementing integer primary keys.
# PostgreSQL 10.2+ uses identity columns; older versions use the
# SERIAL pseudotype.
def serial_primary_key_options
  # :nocov:
  increment_key = if server_version >= 100002
    :identity
  else
    :serial
  end
  # :nocov:
  {:primary_key=>true, increment_key=>true, :type=>Integer}
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end", "def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end", "def serial_primary_key_options\n {:primary_key => true, :type => Integer...
[ "0.71952075", "0.71952075", "0.71893656", "0.71893656", "0.70656824", "0.6960461", "0.69145495", "0.69145495", "0.68647623", "0.68647623", "0.68033075", "0.67720366", "0.65689373", "0.6540689", "0.65311176", "0.6473395", "0.6473395", "0.642823", "0.6426254", "0.64066917", "0....
0.645073
17
The version of the PostgreSQL server, used for determining capability.
# The integer version of the connected PostgreSQL server (e.g. 100002
# for 10.2), memoized after the first lookup.  Returns 0 if the
# version query raises a database error.
def server_version(server=nil)
  return @server_version if @server_version
  ds = dataset
  ds = ds.server(server) if server
  # current_setting('server_version_num') already yields the numeric
  # form, so no string parsing is needed.
  @server_version = swallow_database_error{ds.with_sql("SELECT CAST(current_setting('server_version_num') AS integer) AS v").single_value} || 0
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def server_version\n db.server_version(@opts[:server])\n end", "def version\n @version ||= exec('SHOW server_version')[0]['server_version'].split[0]\n end", "def get_server_version\n server_info[:server_version]\n end", "def server_version; end", "def server_version\n Serve...
[ "0.8236167", "0.82214254", "0.77873117", "0.74867266", "0.74571335", "0.73964053", "0.7312636", "0.7275439", "0.7234323", "0.72188014", "0.7165389", "0.7121293", "0.7120812", "0.71062684", "0.7072108", "0.7003863", "0.7002735", "0.6965803", "0.69554186", "0.69160396", "0.6834...
0.7409582
5
PostgreSQL supports CREATE TABLE IF NOT EXISTS on 9.1+
# CREATE TABLE IF NOT EXISTS requires PostgreSQL 9.1+.
def supports_create_table_if_not_exists?
  server_version >= 90100
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_create_table_if_not_exists?\n true\n end", "def supports_create_table_if_not_exists?\n false\n end", "def supports_drop_table_if_exists?\n supports_create_table_if_not_exists?\n end", "def create_table?(*args, &block)\n create_table(*args, &block) unless tabl...
[ "0.8169112", "0.81165934", "0.74088", "0.7350597", "0.7100451", "0.7100451", "0.7086276", "0.6980296", "0.6929266", "0.68034923", "0.6684206", "0.66616875", "0.65244293", "0.6499984", "0.64946395", "0.6465946", "0.6458165", "0.64432794", "0.64391005", "0.6437936", "0.64346486...
0.7571522
2
PostgreSQL 9.0+ supports some types of deferrable constraints beyond foreign key constraints.
# Deferrable non-foreign-key constraints require PostgreSQL 9.0+.
def supports_deferrable_constraints?
  server_version >= 90000
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_deferrable_foreign_key_constraints?\n true\n end", "def supports_deferrable_foreign_key_constraints?\n true\n end", "def supports_deferrable_foreign_key_constraints?\n supports_deferrable_constraints?\n end", "def supports_deferrable_constraints?\n false\n ...
[ "0.79280597", "0.79274535", "0.7805954", "0.734535", "0.72932184", "0.68518835", "0.6851376", "0.6846686", "0.65102047", "0.63518226", "0.63061714", "0.63061714", "0.6213247", "0.6213247", "0.6106287", "0.6096646", "0.608334", "0.6045513", "0.6045513", "0.6045513", "0.6021384...
0.540124
68
PostgreSQL supports deferrable foreign key constraints.
# Deferrable foreign key constraints are always available on PostgreSQL.
def supports_deferrable_foreign_key_constraints?
  true
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_deferrable_foreign_key_constraints?\n supports_deferrable_constraints?\n end", "def foreign_key_present?\n false\n end", "def foreign_key_present?\n false\n end", "def foreign_keys\n @foreign_keys ||= connection.foreign_keys(table_name, \"#{name} ...
[ "0.7820092", "0.7030612", "0.7030612", "0.68069375", "0.677845", "0.67621166", "0.67621166", "0.674389", "0.6702672", "0.66943294", "0.66943294", "0.661967", "0.65737766", "0.65495896", "0.65211123", "0.6496953", "0.6462772", "0.64350355", "0.641176", "0.6408627", "0.6408627"...
0.8199576
1
PostgreSQL supports DROP TABLE IF EXISTS
# DROP TABLE IF EXISTS is always available on PostgreSQL.
def supports_drop_table_if_exists?
  true
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_drop_table_if_exists\n connection.create_table(:testings)\n assert connection.table_exists?(:testings)\n connection.drop_table(:testings, if_exists: true)\n assert_not connection.table_exists?(:testings)\n end", "def drop_table_sql(name, options)\n \"DROP ...
[ "0.75406855", "0.7447282", "0.7352017", "0.7257186", "0.70561236", "0.70199263", "0.7016024", "0.6948586", "0.692693", "0.69207895", "0.69183964", "0.68844426", "0.68596363", "0.68555564", "0.68210626", "0.67926085", "0.6784143", "0.6715057", "0.6701895", "0.66934294", "0.667...
0.7773049
1
PostgreSQL supports partial indexes.
# Partial indexes are always available on PostgreSQL.
def supports_partial_indexes?
  true
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_partial_indexes?\n false\n end", "def supports_partial_index?\n false\n end", "def supports_partial_index?\n false\n end", "def indexes(table_name, name = nil)\n result = query(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.ind...
[ "0.78717965", "0.7054134", "0.7054134", "0.63987356", "0.63508844", "0.63295084", "0.6323869", "0.6159993", "0.61426693", "0.61346114", "0.6086542", "0.60757685", "0.6067708", "0.6039056", "0.60032016", "0.5991477", "0.5991477", "0.5944628", "0.5943683", "0.5924851", "0.59207...
0.79788727
0
PostgreSQL 9.0+ supports trigger conditions.
def supports_trigger_conditions? server_version >= 90000 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pgt_trigger(table, trigger_name, function_name, events, definition, opts={})\n create_function(function_name, definition, :language=>:plpgsql, :returns=>:trigger, :replace=>true)\n create_trigger(table, trigger_name, function_name, :events=>events, :each_row=>true, :after=>opts[:after])\n en...
[ "0.6295187", "0.6269034", "0.6183004", "0.6004594", "0.5989614", "0.59486556", "0.58836603", "0.5881881", "0.58626014", "0.5818532", "0.5760473", "0.56977266", "0.5681853", "0.56244934", "0.56173915", "0.5532288", "0.55016947", "0.5458493", "0.5457816", "0.5444176", "0.543475...
0.62159413
2
PostgreSQL supports prepared transactions (twophase commit) if max_prepared_transactions is greater than 0.
def supports_prepared_transactions? return @supports_prepared_transactions if defined?(@supports_prepared_transactions) @supports_prepared_transactions = self['SHOW max_prepared_transactions'].get.to_i > 0 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_prepared_transactions?\n false\n end", "def on_prepare_transaction_commit(unit, transaction); end", "def commit_transaction(conn, opts=OPTS)\n if (s = opts[:prepare]) && savepoint_level(conn) <= 1\n log_connection_execute(conn, \"PREPARE TRANSACTION #{literal(s)}\")\n ...
[ "0.7779416", "0.67318296", "0.66393447", "0.6488887", "0.64478713", "0.642692", "0.62858105", "0.6274448", "0.6274448", "0.62604916", "0.6229228", "0.6184096", "0.6167285", "0.6167285", "0.61652166", "0.61652166", "0.60421", "0.59701645", "0.5968591", "0.59547496", "0.592801"...
0.7233213
1
PostgreSQL supports transaction isolation levels
def supports_transaction_isolation_levels? true end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_transaction_isolation?\n true\n end", "def supports_transaction_isolation_levels?\n true\n end", "def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yi...
[ "0.7294652", "0.72276586", "0.7209305", "0.7209305", "0.70683", "0.699975", "0.69545364", "0.69545364", "0.6950709", "0.6950709", "0.6924159", "0.6880454", "0.68508345", "0.68450475", "0.6827295", "0.67907196", "0.6744684", "0.6744684", "0.6744684", "0.6701072", "0.6701072", ...
0.7193443
4
PostgreSQL supports transaction DDL statements.
def supports_transactional_ddl? true end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_transactional_ddl?\n false\n end", "def begin_db_transaction\n execute(\"BEGIN\")\n end", "def begin_db_transaction\n execute(\"BEGIN\")\n end", "def supports_ddl_transactions?\n false\n end", "def supports_ddl_transactions?\n false\n ...
[ "0.7055532", "0.69787323", "0.69787323", "0.68916005", "0.68916005", "0.68916005", "0.6853812", "0.6645684", "0.6645684", "0.6479298", "0.6479298", "0.64768124", "0.64768124", "0.64768124", "0.6476242", "0.6454384", "0.6442726", "0.6442726", "0.6414321", "0.6414321", "0.64097...
0.725727
0
Array of symbols specifying table names in the current database. The dataset used is yielded to the block if one is provided, otherwise, an array of symbols of table names is returned. Options: :qualify :: Return the tables as Sequel::SQL::QualifiedIdentifier instances, using the schema the table is located in as the qualifier. :schema :: The schema to search :server :: The server to use
def tables(opts=OPTS, &block) pg_class_relname(['r', 'p'], opts, &block) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tables\n table_names.map { |tn| table(tn) }\n end", "def tables\n options.fetch(:tables)\n end", "def tables(opts={})\n m = output_identifier_meth\n metadata_dataset.from(:information_schema__tables___t).\n select(:table_name).\n filter(:table_type=>'BASE TABLE',...
[ "0.6650578", "0.65994585", "0.6580728", "0.65064585", "0.64151305", "0.64074725", "0.63925153", "0.6348351", "0.6316044", "0.62666374", "0.6252346", "0.6231937", "0.6230909", "0.6217725", "0.6167698", "0.6167294", "0.61424625", "0.6121533", "0.61198103", "0.60758275", "0.6073...
0.6096867
19
Check whether the given type name string/symbol (e.g. :hstore) is supported by the database.
def type_supported?(type) Sequel.synchronize{return @supported_types[type] if @supported_types.has_key?(type)} supported = from(:pg_type).where(:typtype=>'b', :typname=>type.to_s).count > 0 Sequel.synchronize{return @supported_types[type] = supported} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports?(dbtype)\n (:ctan.eql? dbtype.to_sym)\n end", "def valid_type?(type)\n !native_database_types[type].nil?\n end", "def type_exists?(name)\n user_defined_types.key? name.to_s\n end", "def supports?(type)\n supported.include? type.to_sym\n end", "def ch...
[ "0.71684283", "0.71072584", "0.6997427", "0.6890807", "0.6834597", "0.66947275", "0.6680831", "0.6505652", "0.63379216", "0.6317333", "0.62921417", "0.6221379", "0.6195991", "0.6191106", "0.61868876", "0.6179928", "0.61571366", "0.6141454", "0.6141323", "0.6129752", "0.609230...
0.67410797
5
Creates a dataset that uses the VALUES clause: DB.values([[1, 2], [3, 4]]) VALUES ((1, 2), (3, 4)) DB.values([[1, 2], [3, 4]]).order(:column2).limit(1, 1) VALUES ((1, 2), (3, 4)) ORDER BY column2 LIMIT 1 OFFSET 1
def values(v) raise Error, "Cannot provide an empty array for values" if v.empty? @default_dataset.clone(:values=>v) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def select_values_sql(sql)\n sql << \"VALUES \"\n expression_list_append(sql, opts[:values])\n end", "def test_values\n value = nil\n assert_nothing_raised do\n value = ActiveRecord::Base.connection.send(:select_rows, \"VALUES('ur', 'doin', 'it', 'right')\")\n end\n assert_equ...
[ "0.6753085", "0.62763065", "0.6110033", "0.60504055", "0.60172516", "0.6014192", "0.6014192", "0.6014192", "0.59369415", "0.59227353", "0.5892935", "0.5834878", "0.5817698", "0.58152235", "0.57881755", "0.57512873", "0.5743603", "0.56180286", "0.56150466", "0.5567219", "0.556...
0.5404321
35
Array of symbols specifying view names in the current database. Options: :materialized :: Return materialized views :qualify :: Return the views as Sequel::SQL::QualifiedIdentifier instances, using the schema the view is located in as the qualifier. :schema :: The schema to search :server :: The server to use
def views(opts=OPTS) relkind = opts[:materialized] ? 'm' : 'v' pg_class_relname(relkind, opts) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def views(name = nil)\n select_values(\"SELECT table_name FROM information_schema.views\", name)\n end", "def get_views\n connect_db.fetch(\"SELECT RDB$RELATION_NAME, RDB$VIEW_SOURCE FROM RDB$RELATIONS WHERE RDB$VIEW_BLR IS NOT NULL AND (RDB$SYSTEM_FLAG IS NULL OR RDB$SYSTEM_FLAG = 0)\")\n end", ...
[ "0.7103677", "0.66371846", "0.62148297", "0.6162572", "0.6033561", "0.5926373", "0.58784133", "0.5872362", "0.5858072", "0.5778937", "0.5770808", "0.5722647", "0.57035476", "0.563731", "0.55852634", "0.55369884", "0.5510381", "0.5483866", "0.5450678", "0.54368955", "0.5373665...
0.69248116
1
Dataset used to retrieve CHECK constraint information
def _check_constraints_ds @_check_constraints_ds ||= metadata_dataset. from{pg_constraint.as(:co)}. left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])). where(:contype=>'c'). select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_constraints(table)\n m = output_identifier_meth\n\n hash = {}\n _check_constraints_ds.where_each(:conrelid=>regclass_oid(table)) do |row|\n constraint = m.call(row[:constraint])\n entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]}\n ...
[ "0.6155641", "0.60944134", "0.5996631", "0.59153426", "0.58677566", "0.57724947", "0.57413626", "0.57413626", "0.57413626", "0.569063", "0.5663217", "0.5560472", "0.5520541", "0.5471246", "0.5470642", "0.54595786", "0.54445577", "0.54018646", "0.5377396", "0.5370092", "0.5359...
0.7300358
0
Dataset used to retrieve foreign keys referenced by a table
def _foreign_key_list_ds @_foreign_key_list_ds ||= __foreign_key_list_ds(false) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rcla...
[ "0.7179927", "0.69182503", "0.6904587", "0.68861365", "0.68154234", "0.66139185", "0.6553873", "0.6553873", "0.65525526", "0.6544885", "0.6445108", "0.6381183", "0.63455", "0.63219166", "0.6265057", "0.62337005", "0.62207776", "0.61979944", "0.6148675", "0.6110326", "0.608275...
0.670528
5
Dataset used to retrieve foreign keys referencing a table
def _reverse_foreign_key_list_ds @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rcla...
[ "0.720176", "0.68655574", "0.6834935", "0.6735687", "0.6703324", "0.6665703", "0.651877", "0.6441737", "0.640113", "0.640113", "0.637693", "0.63623524", "0.6326083", "0.6310065", "0.6262004", "0.6242311", "0.61340344", "0.6112476", "0.60712636", "0.60219014", "0.60215473", ...
0.6119571
17
Build dataset used for foreign key list methods.
def __foreign_key_list_ds(reverse) if reverse ctable = Sequel[:att2] cclass = Sequel[:cl2] rtable = Sequel[:att] rclass = Sequel[:cl] else ctable = Sequel[:att] cclass = Sequel[:cl] rtable = Sequel[:att2] rclass = Sequel[:cl2] end if server_version >= 90500 cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])} rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])} # :nocov: else range = 0...32 cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])} rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])} # :nocov: end ds = metadata_dataset. from{pg_constraint.as(:co)}. join(Sequel[:pg_class].as(cclass), :oid=>:conrelid). join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])). join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]). join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])). join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]). order{[co[:conname], cpos]}. where{{ cl[:relkind]=>%w'r p', co[:contype]=>'f', cpos=>rpos }}. select{[ co[:conname].as(:name), ctable[:attname].as(:column), co[:confupdtype].as(:on_update), co[:confdeltype].as(:on_delete), cl2[:relname].as(:table), rtable[:attname].as(:refcolumn), SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable), nsp[:nspname].as(:schema) ]} if reverse ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname]) end ds end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _foreign_key_list_ds\n @_foreign_key_list_ds ||= __foreign_key_list_ds(false)\n end", "def _reverse_foreign_key_list_ds\n @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true)\n end", "def build!(data_set_name)\n interface(data_set_name).create_empty_data_set\n ...
[ "0.6950944", "0.61976147", "0.6181576", "0.57991093", "0.5773287", "0.56557083", "0.5640482", "0.5637681", "0.5618548", "0.5608803", "0.56053627", "0.55660987", "0.5563028", "0.55293816", "0.5527643", "0.55013406", "0.5487018", "0.5480283", "0.5435237", "0.5426875", "0.541391...
0.68358237
1
Dataset used to retrieve index information
def _indexes_ds @_indexes_ds ||= begin if server_version >= 90500 order = [Sequel[:indc][:relname], Sequel.function(:array_position, Sequel[:ind][:indkey], Sequel[:att][:attnum])] # :nocov: else range = 0...32 order = [Sequel[:indc][:relname], SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(Sequel[:ind][:indkey], [x]), x]}, 32, Sequel[:att][:attnum])] # :nocov: end attnums = SQL::Function.new(:ANY, Sequel[:ind][:indkey]) ds = metadata_dataset. from{pg_class.as(:tab)}. join(Sequel[:pg_index].as(:ind), :indrelid=>:oid). join(Sequel[:pg_class].as(:indc), :oid=>:indexrelid). join(Sequel[:pg_attribute].as(:att), :attrelid=>Sequel[:tab][:oid], :attnum=>attnums). left_join(Sequel[:pg_constraint].as(:con), :conname=>Sequel[:indc][:relname]). where{{ indc[:relkind]=>%w'i I', ind[:indisprimary]=>false, :indexprs=>nil, :indisvalid=>true}}. order(*order). select{[indc[:relname].as(:name), ind[:indisunique].as(:unique), att[:attname].as(:column), con[:condeferrable].as(:deferrable)]} # :nocov: ds = ds.where(:indisready=>true) if server_version >= 80300 ds = ds.where(:indislive=>true) if server_version >= 90300 # :nocov: ds end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index\n @dataset_data = DatasetDatum.all\n end", "def indices\n @data.keys\n end", "def construct_index\n end", "def dataset(index)\n idx_name = IndexName[index]\n Dataset.new(client, params: { index: idx_name.to_sym })\n end", "def indexed\n self['indexed']\n end",...
[ "0.71727204", "0.69043535", "0.66589785", "0.6559253", "0.6507489", "0.6449275", "0.6387849", "0.6387849", "0.6387849", "0.63644105", "0.63298535", "0.63144547", "0.6288391", "0.6281459", "0.6281459", "0.6281459", "0.62788105", "0.62172794", "0.62114143", "0.6202234", "0.6196...
0.70689636
1
Dataset used to determine custom serial sequences for tables
def _select_custom_sequence_ds @_select_custom_sequence_ds ||= metadata_dataset. from{pg_class.as(:t)}. join(:pg_namespace, {:oid => :relnamespace}, :table_alias=>:name). join(:pg_attribute, {:attrelid => Sequel[:t][:oid]}, :table_alias=>:attr). join(:pg_attrdef, {:adrelid => :attrelid, :adnum => :attnum}, :table_alias=>:def). join(:pg_constraint, {:conrelid => :adrelid, Sequel[:cons][:conkey].sql_subscript(1) => :adnum}, :table_alias=>:cons). where{{cons[:contype] => 'p', pg_get_expr(self.def[:adbin], attr[:attrelid]) => /nextval/i}}. select{ expr = split_part(pg_get_expr(self.def[:adbin], attr[:attrelid]), "'", 2) [ name[:nspname].as(:schema), Sequel.case({{expr => /./} => substr(expr, strpos(expr, '.')+1)}, expr).as(:sequence) ] } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _select_serial_sequence_ds\n @_serial_sequence_ds ||= metadata_dataset.\n from{[\n pg_class.as(:seq),\n pg_attribute.as(:attr),\n pg_depend.as(:dep),\n pg_namespace.as(:name),\n pg_constraint.as(:cons),\n pg_class.as(:t)\n ...
[ "0.700258", "0.56914324", "0.5634827", "0.55601764", "0.55468976", "0.53256935", "0.5324605", "0.52987653", "0.52987653", "0.52441895", "0.52351105", "0.5192623", "0.5109223", "0.5084473", "0.50460196", "0.503191", "0.5023268", "0.501827", "0.49391106", "0.49349368", "0.49286...
0.6921501
1
Dataset used to determine normal serial sequences for tables
def _select_serial_sequence_ds @_serial_sequence_ds ||= metadata_dataset. from{[ pg_class.as(:seq), pg_attribute.as(:attr), pg_depend.as(:dep), pg_namespace.as(:name), pg_constraint.as(:cons), pg_class.as(:t) ]}. where{[ [seq[:oid], dep[:objid]], [seq[:relnamespace], name[:oid]], [seq[:relkind], 'S'], [attr[:attrelid], dep[:refobjid]], [attr[:attnum], dep[:refobjsubid]], [attr[:attrelid], cons[:conrelid]], [attr[:attnum], cons[:conkey].sql_subscript(1)], [attr[:attrelid], t[:oid]], [cons[:contype], 'p'] ]}. select{[ name[:nspname].as(:schema), seq[:relname].as(:sequence) ]} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _select_custom_sequence_ds\n @_select_custom_sequence_ds ||= metadata_dataset.\n from{pg_class.as(:t)}.\n join(:pg_namespace, {:oid => :relnamespace}, :table_alias=>:name).\n join(:pg_attribute, {:attrelid => Sequel[:t][:oid]}, :table_alias=>:attr).\n join(:pg_attrdef...
[ "0.60077465", "0.52115154", "0.5192342", "0.5162852", "0.5099557", "0.5063107", "0.50618666", "0.5042069", "0.5014444", "0.5005303", "0.4981823", "0.49606812", "0.49182153", "0.49182153", "0.49060225", "0.489107", "0.48698348", "0.48606262", "0.48503137", "0.48341665", "0.482...
0.6579111
0
Dataset used to determine primary keys for tables
def _select_pk_ds @_select_pk_ds ||= metadata_dataset. from(:pg_class, :pg_attribute, :pg_index, :pg_namespace). where{[ [pg_class[:oid], pg_attribute[:attrelid]], [pg_class[:relnamespace], pg_namespace[:oid]], [pg_class[:oid], pg_index[:indrelid]], [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]], [pg_index[:indisprimary], 't'] ]}. select{pg_attribute[:attname].as(:pk)} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n ...
[ "0.77333367", "0.7475051", "0.7425205", "0.7375289", "0.7366104", "0.72455585", "0.7180687", "0.7167466", "0.7092792", "0.70765555", "0.706616", "0.7043821", "0.69874865", "0.68165034", "0.68044215", "0.6772434", "0.67652905", "0.675731", "0.67554665", "0.6754021", "0.6702185...
0.75759834
1
Dataset used to get schema for tables
def _schema_ds @_schema_ds ||= begin ds = metadata_dataset.select{[ pg_attribute[:attname].as(:name), SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid), SQL::Cast.new(basetype[:oid], :integer).as(:base_oid), SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type), SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type), SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default), SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null), SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key), Sequel[:pg_type][:typtype], (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array), ]}. from(:pg_class). join(:pg_attribute, :attrelid=>:oid). join(:pg_type, :oid=>:atttypid). left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype). left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]). left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]). left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true). where{{pg_attribute[:attisdropped]=>false}}. where{pg_attribute[:attnum] > 0}. order{pg_attribute[:attnum]} # :nocov: if server_version > 100000 # :nocov: ds = ds.select_append{pg_attribute[:attidentity]} # :nocov: if server_version > 120000 # :nocov: ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)} end end ds end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def schema_ds_dataset\n schema_utility_dataset\n end", "def schema_and_table(table_name)\n schema_utility_dataset.schema_and_table(table_name)\n end", "def schema_ds_from(table_name, opts)\n [:information_schema__tables___t]\n end", "def tables(opts={})\n m = output_ide...
[ "0.76885384", "0.740642", "0.72837543", "0.72820276", "0.7032066", "0.7010101", "0.68253994", "0.68143594", "0.68125886", "0.67858446", "0.67315453", "0.6719681", "0.6707291", "0.67013437", "0.6655032", "0.66547054", "0.66547054", "0.6615402", "0.65927154", "0.6584639", "0.65...
0.73695695
2
If the :synchronous option is given and nonnil, set synchronous_commit appropriately. Valid values for the :synchronous option are true, :on, false, :off, :local, and :remote_write.
def begin_new_transaction(conn, opts) super if opts.has_key?(:synchronous) case sync = opts[:synchronous] when true sync = :on when false sync = :off when nil return end log_connection_execute(conn, "SET LOCAL synchronous_commit = #{sync}") end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def synchronous_commit=(value)\n raise ArgumentError, \"argument must be true or false\" unless value == true || value == false\n\n execute \"SET #{'LOCAL' if open_transactions > 0} synchronous_commit TO #{value ? 'ON' : 'OFF'}\"\n end", "def synchronous!\n @asynchronous = false\n end", ...
[ "0.79772484", "0.6915001", "0.59547013", "0.5716413", "0.5692907", "0.5628751", "0.5575133", "0.5575133", "0.5518805", "0.5504294", "0.5468508", "0.53467554", "0.53143233", "0.53134865", "0.52963495", "0.5211512", "0.5206623", "0.51856744", "0.5146709", "0.51061195", "0.50953...
0.7401804
1
Set the READ ONLY transaction setting per savepoint, as PostgreSQL supports that.
def begin_savepoint(conn, opts) super unless (read_only = opts[:read_only]).nil? log_connection_execute(conn, "SET TRANSACTION READ #{read_only ? 'ONLY' : 'WRITE'}") end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def with_transaction(read_only: false, &block)\n @env.transaction(read_only, &block)\n end", "def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n read_only = opts[:read_only]\n deferrable = opts[:deferrable]\n if level || !...
[ "0.6960527", "0.6353735", "0.62602395", "0.60868007", "0.6068373", "0.60130394", "0.5990826", "0.58925563", "0.5890557", "0.5845283", "0.57550824", "0.57550824", "0.5740366", "0.56175214", "0.56175214", "0.55855775", "0.5568516", "0.5552127", "0.55378616", "0.5520403", "0.550...
0.7488052
0
Literalize nonString collate options. This is because unquoted collatations are folded to lowercase, and PostgreSQL used mixed case or capitalized collations.
def column_definition_collate_sql(sql, column) if collate = column[:collate] collate = literal(collate) unless collate.is_a?(String) sql << " COLLATE #{collate}" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def collation\n nil\n end", "def collation\n select_value(\n \"SELECT pg_database.datcollate\" <<\n \" FROM pg_database\" <<\n \" WHERE pg_database.datname LIKE '#{current_database}'\",\n 'SCHEMA')\n end", "def column_definition_collate_sql(sql, column)\n if colla...
[ "0.61051226", "0.5877964", "0.584784", "0.5752945", "0.5641679", "0.55594814", "0.553016", "0.5495804", "0.5466278", "0.5410757", "0.5399953", "0.538035", "0.5236631", "0.51445156", "0.5142646", "0.5113955", "0.51080203", "0.51070803", "0.5061387", "0.50466436", "0.50328183",...
0.5957464
1
Support identity columns, but only use the identity SQL syntax if no default value is given.
def column_definition_default_sql(sql, column) super if !column[:serial] && !['smallserial', 'serial', 'bigserial'].include?(column[:type].to_s) && !column[:default] if (identity = column[:identity]) sql << " GENERATED " sql << (identity == :always ? "ALWAYS" : "BY DEFAULT") sql << " AS IDENTITY" elsif (generated = column[:generated_always_as]) sql << " GENERATED ALWAYS AS (#{literal(generated)}) STORED" end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, tru...
[ "0.6527207", "0.6237912", "0.62254494", "0.619042", "0.61669624", "0.61669624", "0.6040682", "0.6005519", "0.6005519", "0.59868515", "0.5979152", "0.59677976", "0.59677976", "0.59347665", "0.58789515", "0.5854652", "0.5758402", "0.57253426", "0.57253426", "0.5648476", "0.5648...
0.6951704
0
Handle PostgreSQL specific default format.
def column_schema_normalize_default(default, type) if m = /\A(?:B?('.*')::[^']+|\((-?\d+(?:\.\d+)?)\))\z/.match(default) default = m[1] || m[2] end super(default, type) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def default_format=(format)\n @default_format = format\n end", "def default_format\n @default_format ||= :html\n end", "def set_default_format\n @default_format=\"pdf\"\n end", "def set_date_format(format) \n unless format.nil?\n return format\n else\n return @@db_format\n ...
[ "0.62009215", "0.5986719", "0.5925541", "0.5899727", "0.58987576", "0.58987576", "0.5891657", "0.58241314", "0.57326424", "0.5712006", "0.5712006", "0.5691154", "0.5628859", "0.5605188", "0.5576969", "0.5564784", "0.55390805", "0.5512747", "0.55099905", "0.55004245", "0.54813...
0.5733273
8
If the :prepare option is given and we aren't in a savepoint, prepare the transaction for a twophase commit.
def commit_transaction(conn, opts=OPTS) if (s = opts[:prepare]) && savepoint_level(conn) <= 1 log_connection_execute(conn, "PREPARE TRANSACTION #{literal(s)}") else super end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_prepare_transaction_commit(unit, transaction); end", "def commit_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n if supports_releasing_savepoints?\n log_connection_yield('Transaction.release_savepoint', conn){conn.release_savepoint(savepoint_obj(conn))}\n end\n ...
[ "0.6718019", "0.662396", "0.6306131", "0.6208742", "0.61423445", "0.6091793", "0.6091793", "0.6078697", "0.6044119", "0.59700394", "0.5848423", "0.58226323", "0.58226323", "0.57957965", "0.5729422", "0.56911594", "0.56845605", "0.56661", "0.56661", "0.56661", "0.5664158", "...
0.71222514
0
PostgreSQL can't combine rename_column operations, and it can combine the custom validate_constraint operation.
def combinable_alter_table_op?(op) (super || op[:op] == :validate_constraint) && op[:op] != :rename_column end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name)\n\n with_l...
[ "0.6728584", "0.65458584", "0.5705936", "0.5677713", "0.56549233", "0.5590001", "0.55832726", "0.55148995", "0.55096465", "0.5468966", "0.5393673", "0.53134495", "0.531104", "0.5295988", "0.528086", "0.5262339", "0.52330846", "0.5218797", "0.5215299", "0.5191192", "0.5191192"...
0.64177334
2
The SQL queries to execute when starting a new connection.
def connection_configuration_sqls(opts=@opts) sqls = [] sqls << "SET standard_conforming_strings = ON" if typecast_value_boolean(opts.fetch(:force_standard_strings, true)) cmm = opts.fetch(:client_min_messages, :warning) if cmm && !cmm.to_s.empty? cmm = cmm.to_s.upcase.strip unless VALID_CLIENT_MIN_MESSAGES.include?(cmm) raise Error, "Unsupported client_min_messages setting: #{cmm}" end sqls << "SET client_min_messages = '#{cmm.to_s.upcase}'" end if search_path = opts[:search_path] case search_path when String search_path = search_path.split(",").map(&:strip) when Array # nil else raise Error, "unrecognized value for :search_path option: #{search_path.inspect}" end sqls << "SET search_path = #{search_path.map{|s| "\"#{s.gsub('"', '""')}\""}.join(',')}" end sqls end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sqls\n @mutex.synchronize do\n s = @sqls.dup\n @sqls.clear\n s\n end\n end", "def prepare_sql_statements\n begin\n databases.each do |db|\n create_query[db][0] = create_query[db][0] + \" \" + create_query[db][1]\n create_query[db].de...
[ "0.6558", "0.6437771", "0.6432272", "0.64197713", "0.64197713", "0.6389099", "0.63364065", "0.6318091", "0.63028187", "0.62802106", "0.62638634", "0.6246098", "0.6229926", "0.61795676", "0.61747974", "0.61747974", "0.6153917", "0.6148627", "0.6138928", "0.6134228", "0.6104814...
0.57855695
41
SQL for doing fast table insert from stdin.
def copy_into_sql(table, opts) sql = String.new sql << "COPY #{literal(table)}" if cols = opts[:columns] sql << literal(Array(cols)) end sql << " FROM STDIN" if opts[:options] || opts[:format] sql << " (" sql << "FORMAT #{opts[:format]}" if opts[:format] sql << "#{', ' if opts[:format]}#{opts[:options]}" if opts[:options] sql << ')' end sql end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def format table, keys, values\n <<-SQL\nCOPY #{table} (#{keys.join(', ')}) FROM stdin;\n#{values.collect { |row| row.join \"\\t\" }.join \"\\n\"}\n\\\\.\n SQL\n end", "def fast_insert(rows, base_cmd, end_cmd = '')\n RawDB.fast_insert(db, rows, base_cmd, end_cmd)\n end", "def row...
[ "0.6569321", "0.65120876", "0.6489801", "0.64708847", "0.6438791", "0.63468456", "0.622612", "0.6213247", "0.62104845", "0.6193804", "0.61814976", "0.6177956", "0.61522686", "0.6113579", "0.61133456", "0.6103705", "0.6092123", "0.60913783", "0.60901535", "0.60656404", "0.6050...
0.6786813
0
SQL for doing fast table output to stdout.
def copy_table_sql(table, opts) if table.is_a?(String) table else if opts[:options] || opts[:format] options = String.new options << " (" options << "FORMAT #{opts[:format]}" if opts[:format] options << "#{', ' if opts[:format]}#{opts[:options]}" if opts[:options] options << ')' end table = if table.is_a?(::Sequel::Dataset) "(#{table.sql})" else literal(table) end "COPY #{table} TO STDOUT#{options}" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def OLDview_data db, sql, options\n outputfile = options[:output_to]\n formatting = options[:formatting]\n headers = options[:headers]\n #str = db.get_data sql\n rs = db.execute_query sql\n str = rs.content\n columns = rs.columns\n #puts \"SQL: #{sql}.\\nstr: #{str.size}\"\n data = []\n if headers\n d...
[ "0.66475284", "0.6606472", "0.65019464", "0.63997036", "0.6392478", "0.63616675", "0.62400645", "0.62208533", "0.61771643", "0.61625415", "0.6158442", "0.6136209", "0.6056827", "0.6039973", "0.6032121", "0.58631384", "0.58205557", "0.5808749", "0.5798774", "0.5790676", "0.579...
0.5837525
16
SQL statement to create database function.
# SQL statement to create (or replace) a database function.
#
# name       :: function name
# definition :: function body, literalized as a string
# opts       :: :args, :returns, :language, :behavior, :strict,
#               :security_definer, :parallel, :cost, :rows, :set,
#               :link_symbol, :replace
def create_function_sql(name, definition, opts=OPTS)
  args = opts[:args]
  # The RETURNS clause is omitted only when the argument list itself
  # declares output parameters (a 3-element arg spec with OUT/INOUT mode).
  has_output_args = args.is_a?(Array) && args.any? do |a|
    Array(a).length == 3 && %w'OUT INOUT'.include?(a[2].to_s)
  end
  returns = opts[:returns] || 'void' unless has_output_args
  language = opts[:language] || 'SQL'
  <<-END
  CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}
  #{"RETURNS #{returns}" if returns}
  LANGUAGE #{language}
  #{opts[:behavior].to_s.upcase if opts[:behavior]}
  #{'STRICT' if opts[:strict]}
  #{'SECURITY DEFINER' if opts[:security_definer]}
  #{"PARALLEL #{opts[:parallel].to_s.upcase}" if opts[:parallel]}
  #{"COST #{opts[:cost]}" if opts[:cost]}
  #{"ROWS #{opts[:rows]}" if opts[:rows]}
  #{opts[:set].map{|k,v| " SET #{k} = #{v}"}.join("\n") if opts[:set]}
  AS #{literal(definition.to_s)}#{", #{literal(opts[:link_symbol].to_s)}" if opts[:link_symbol]}
  END
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def name\n\t\t\"db_fun\"\n\tend", "def create_function(name, definition, opts=OPTS)\n self << create_function_sql(name, definition, opts)\n end", "def function(name, *args)\n SQL::Function.new(name, *args)\n end", "def create_function(function_name, returning, definition, options = {}...
[ "0.73484826", "0.72461474", "0.72118765", "0.7096132", "0.7025482", "0.6792593", "0.64558375", "0.64074355", "0.6336611", "0.63190633", "0.63179994", "0.6292024", "0.6202775", "0.6190527", "0.61772263", "0.61308116", "0.61014944", "0.6098711", "0.6088275", "0.60713166", "0.60...
0.7258271
1
SQL for creating a procedural language.
# SQL for creating a procedural language.
#
# opts :: :replace   :: use OR REPLACE (only on server version >= 9.0)
#         :trusted   :: mark the language TRUSTED
#         :handler   :: call handler function
#         :validator :: validator function
def create_language_sql(name, opts=OPTS)
  sql = "CREATE".dup
  sql << ' OR REPLACE' if opts[:replace] && server_version >= 90000
  sql << ' TRUSTED' if opts[:trusted]
  sql << " LANGUAGE #{name}"
  sql << " HANDLER #{opts[:handler]}" if opts[:handler]
  sql << " VALIDATOR #{opts[:validator]}" if opts[:validator]
  sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_language(name, opts=OPTS)\n self << create_language_sql(name, opts)\n end", "def make_statement\n end", "def make_statement\n end", "def sql\n @parser.sql\n end", "def sql\n @parser.sql\n end", "def add_code(cheatsheet_db, language, type, code, comment)\n cheatsh...
[ "0.61669266", "0.6121147", "0.6121147", "0.59831965", "0.59831965", "0.58226854", "0.57331324", "0.5722036", "0.5647145", "0.56217813", "0.5590047", "0.55899316", "0.55826867", "0.557583", "0.55719095", "0.5558236", "0.55524683", "0.5549", "0.55081385", "0.54753685", "0.54551...
0.71067977
0
Create a partition of another table, used when create_table is called with the :partition_of option.
# Create a partition of another table. Invoked by create_table when the
# :partition_of option is given; builds the DDL and executes it.
def create_partition_of_table_from_generator(name, generator, options)
  ddl = create_partition_of_table_sql(name, generator, options)
  execute_ddl(ddl)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_table(name, options=OPTS, &block)\n if options[:partition_of]\n create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)\n return\n end\n\n super\n end", "def create_range_partition(table_name, options, &block)\n ...
[ "0.7107491", "0.69653577", "0.6848916", "0.6785471", "0.66725737", "0.6580608", "0.6522106", "0.64445055", "0.64298666", "0.6402236", "0.6396007", "0.61293477", "0.6125929", "0.60874057", "0.6074051", "0.60464454", "0.5834928", "0.5782722", "0.57125735", "0.5685198", "0.56286...
0.681942
3
SQL for creating a partition of another table.
# SQL for creating a partition of another table (CREATE TABLE ... PARTITION OF).
# The bound clause depends on the generator's partition type:
# :range -> FOR VALUES FROM ... TO ..., :list -> FOR VALUES IN ...,
# :hash -> FOR VALUES WITH (MODULUS ..., REMAINDER ...), otherwise DEFAULT.
def create_partition_of_table_sql(name, generator, options)
  bound = case generator.partition_type
  when :range
    lo, hi = generator.range
    " FOR VALUES FROM #{literal(lo)} TO #{literal(hi)}"
  when :list
    " FOR VALUES IN #{literal(generator.list)}"
  when :hash
    modulus, remainder = generator.hash_values
    " FOR VALUES WITH (MODULUS #{literal(modulus)}, REMAINDER #{literal(remainder)})"
  else # :default
    " DEFAULT"
  end

  create_table_prefix_sql(name, options) +
    " PARTITION OF #{quote_schema_table(options[:partition_of])}" +
    bound +
    create_table_suffix_sql(name, options)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_partition_of_table_from_generator(name, generator, options)\n execute_ddl(create_partition_of_table_sql(name, generator, options))\n end", "def create_range_partition(table_name, options, &block)\n raise ArgumentError, 'partition_key must be defined' if options[:partition_key].nil?\n\...
[ "0.7089116", "0.6901333", "0.668798", "0.6673402", "0.6528554", "0.64039016", "0.62698126", "0.62523615", "0.6106679", "0.60739046", "0.6064444", "0.6059633", "0.60556954", "0.6045033", "0.60409546", "0.59562093", "0.5860424", "0.5821033", "0.5763657", "0.57181513", "0.569176...
0.7671302
0
SQL for creating a schema.
# SQL for creating a schema.
#
# opts :: :if_not_exists :: add IF NOT EXISTS
#         :owner         :: add an AUTHORIZATION clause for the given owner
def create_schema_sql(name, opts=OPTS)
  sql = "CREATE SCHEMA ".dup
  sql << 'IF NOT EXISTS ' if opts[:if_not_exists]
  sql << quote_identifier(name)
  sql << " AUTHORIZATION #{literal(opts[:owner])}" if opts[:owner]
  sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_schema(schema)\n execute \"CREATE SCHEMA #{schema}\", 'Create Schema'\n end", "def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end", "def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end", "def...
[ "0.8491162", "0.8416065", "0.8397193", "0.8367142", "0.8297997", "0.7959022", "0.7633281", "0.7361771", "0.7265676", "0.725682", "0.7212021", "0.7210765", "0.7127146", "0.7030964", "0.69537985", "0.6888085", "0.6867996", "0.6858188", "0.685559", "0.6830588", "0.6817735", "0...
0.81587696
5
DDL statement for creating a table with the given name, columns, and options
# DDL fragment for the start of a CREATE TABLE statement, handling the
# mutually exclusive :temp, :foreign, and :unlogged options. Temporary
# tables are quoted without a schema; all others use schema-qualified
# quoting.
def create_table_prefix_sql(name, options)
  temp = options[:temp]
  foreign = options[:foreign]
  unlogged = options[:unlogged]

  if temp
    raise(Error, "can't provide both :temp and :unlogged to create_table") if unlogged
    raise(Error, "can't provide both :temp and :foreign to create_table") if foreign
    modifier = temporary_table_sql
  elsif foreign
    raise(Error, "can't provide both :foreign and :unlogged to create_table") if unlogged
    modifier = 'FOREIGN '
  elsif unlogged
    modifier = 'UNLOGGED '
  end

  table_name = temp ? quote_identifier(name) : quote_schema_table(name)
  "CREATE #{modifier}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{table_name}"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_table_sql_list(name, columns, indexes = nil, options = {})\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end", "def create_table_sql_list(name, columns, index...
[ "0.75814897", "0.73549694", "0.7348603", "0.7305695", "0.72647333", "0.72407156", "0.7212776", "0.71801597", "0.7177178", "0.71413916", "0.71264935", "0.7111634", "0.71094227", "0.7092248", "0.708161", "0.7043943", "0.70318645", "0.69537044", "0.6920031", "0.6907666", "0.6892...
0.68803096
21
SQL for creating a table with PostgreSQL-specific options
# SQL for creating a table, appending PostgreSQL-specific suffix clauses
# (inheritance, partitioning, tablespace, foreign server) to the generic
# CREATE TABLE statement produced by super.
def create_table_sql(name, generator, options)
  [super, create_table_suffix_sql(name, options)].join
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_table(create_table_options = {})\n self.connection.create_table(table_name, create_table_options) do |t|\n t.column :undone, :boolean, :default => false, :null => false\n t.column :description, :string\n t.column :updated_at, :timestamp\n end\n end", "def ...
[ "0.7415012", "0.72220755", "0.7216366", "0.7109741", "0.70924604", "0.7069454", "0.7053471", "0.7051891", "0.70443887", "0.70225674", "0.6937419", "0.6934067", "0.6926212", "0.68763155", "0.6873812", "0.6843173", "0.68393475", "0.6811212", "0.68088245", "0.672029", "0.6697852...
0.672513
19
Handle various PostgreSQL-specific table extensions such as inheritance, partitioning, tablespaces, and foreign tables.
# Suffix clauses for CREATE TABLE covering PostgreSQL table extensions:
# :inherits, :partition_by/:partition_type, :on_commit (temp tables only,
# validated against ON_COMMIT), :tablespace, and :foreign (with optional
# :options emitted as a foreign-server OPTIONS clause).
def create_table_suffix_sql(name, options)
  parts = []

  if inherits = options[:inherits]
    inherited = Array(inherits).map{|t| quote_schema_table(t)}.join(', ')
    parts << " INHERITS (#{inherited})"
  end

  if partition_by = options[:partition_by]
    # Partitioning strategy defaults to RANGE when :partition_type is absent.
    parts << " PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}"
  end

  if on_commit = options[:on_commit]
    raise(Error, "can't provide :on_commit without :temp to create_table") unless options[:temp]
    raise(Error, "unsupported on_commit option: #{on_commit.inspect}") unless ON_COMMIT.has_key?(on_commit)
    parts << " ON COMMIT #{ON_COMMIT[on_commit]}"
  end

  if tablespace = options[:tablespace]
    parts << " TABLESPACE #{quote_identifier(tablespace)}"
  end

  if server = options[:foreign]
    parts << " SERVER #{quote_identifier(server)}"
    if foreign_opts = options[:options]
      parts << " OPTIONS (#{foreign_opts.map{|k, v| "#{k} #{literal(v.to_s)}"}.join(', ')})"
    end
  end

  parts.join
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extended_table(extra = nil, **opt)\n sql = sql_extended_table(extra, **opt)\n ActiveRecord::Base.connection.exec_query(sql)\n end", "def inherited_tables\n tables = query(<<-SQL, 'SCHEMA')\n SELECT child.relname AS table_name,\n array_agg(parent....
[ "0.64756", "0.60287654", "0.5837757", "0.5837757", "0.5830948", "0.58300763", "0.5774674", "0.57068306", "0.56986165", "0.56673163", "0.56058055", "0.55958223", "0.55805767", "0.55375665", "0.54677045", "0.5446681", "0.541099", "0.54074264", "0.54019165", "0.5395756", "0.5389...
0.5480449
14
SQL for creating a database trigger.
# SQL for creating a database trigger that executes the given function.
#
# opts :: :events   :: trigger events (default: insert, update, delete)
#         :after    :: fire AFTER instead of BEFORE
#         :when     :: trigger condition (raises unless the database
#                      supports trigger conditions)
#         :each_row :: add FOR EACH ROW
#         :replace  :: use CREATE OR REPLACE TRIGGER
#         :args     :: arguments passed to the trigger function
def create_trigger_sql(table, name, function, opts=OPTS)
  event_list = (opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]).
    map{|e| e.to_s.upcase}.join(' OR ')
  timing = opts[:after] ? 'AFTER' : 'BEFORE'

  condition = nil
  if cond = opts[:when]
    unless supports_trigger_conditions?
      raise Error, "Trigger conditions are not supported for this database"
    end
    condition = " WHEN #{filter_expr(cond)}"
  end

  arg_list = Array(opts[:args]).map{|a| literal(a)}.join(', ')

  "CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{timing} #{event_list} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{condition} EXECUTE PROCEDURE #{function}(#{arg_list})"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_trigger(table_name, proc_name, event, options = {})\n\n end", "def create_trigger(table, name, function, opts=OPTS)\n self << create_trigger_sql(table, name, function, opts)\n end", "def createTrigger _args\n \"createTrigger _args;\" \n end", "def create_trigger name, type, ta...
[ "0.7504239", "0.7398713", "0.71695596", "0.716204", "0.69328403", "0.6909614", "0.6909614", "0.68390316", "0.6784707", "0.65625846", "0.62844837", "0.6207567", "0.6205865", "0.6203629", "0.6160155", "0.6159027", "0.60619485", "0.6052486", "0.6001266", "0.59768575", "0.5899913...
0.73837554
2
DDL fragment for initial part of CREATE VIEW statement
# DDL fragment for the initial part of a CREATE VIEW statement, handling
# :replace, :temp, :recursive, and :materialized modifiers, plus the
# :security_invoker and :tablespace suffixes. Explicit columns are
# appended when :columns is given, or implied by :recursive.
def create_view_prefix_sql(name, options)
  modifiers = String.new
  modifiers << 'OR REPLACE ' if options[:replace]
  modifiers << 'TEMPORARY ' if options[:temp]
  modifiers << 'RECURSIVE ' if options[:recursive]
  modifiers << 'MATERIALIZED ' if options[:materialized]

  sql = create_view_sql_append_columns("CREATE #{modifiers}VIEW #{quote_schema_table(name)}", options[:columns] || options[:recursive])
  sql += " WITH (security_invoker)" if options[:security_invoker]
  if tablespace = options[:tablespace]
    sql += " TABLESPACE #{quote_identifier(tablespace)}"
  end
  sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end", "def create_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE VIEW #{name} AS...
[ "0.76422477", "0.7080754", "0.69882655", "0.6876096", "0.6707558", "0.6691078", "0.66169316", "0.6589881", "0.65825975", "0.6561772", "0.6553956", "0.638153", "0.6370509", "0.6340597", "0.62826496", "0.6160476", "0.615917", "0.61341274", "0.601144", "0.5997561", "0.5932272", ...
0.7618731
1
SQL for dropping a function from the database.
# SQL for dropping a function from the database.
#
# opts :: :if_exists :: add IF EXISTS
#         :args      :: argument signature used to disambiguate overloads
#         :cascade   :: add CASCADE
def drop_function_sql(name, opts=OPTS)
  existence = opts[:if_exists] ? ' IF EXISTS' : ''
  cascade = opts[:cascade] ? ' CASCADE' : ''
  "DROP FUNCTION#{existence} #{name}#{sql_function_args(opts[:args])}#{cascade}"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end", "def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end", "def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n ...
[ "0.8369946", "0.80704874", "0.8003502", "0.7982197", "0.7879706", "0.76229805", "0.76229805", "0.75548965", "0.7380597", "0.71375334", "0.71096754", "0.70644176", "0.68965197", "0.68872184", "0.68156517", "0.6793039", "0.67553425", "0.6700025", "0.66806155", "0.66347694", "0....
0.8339578
1
Support :if_exists, :cascade, and :concurrently options.
# SQL for dropping an index, supporting the :if_exists, :cascade, and
# :concurrently options. The index name defaults to the conventional
# name derived from the table and columns when op[:name] is absent, and
# is schema-qualified when the table reference carries a schema.
def drop_index_sql(table, op)
  schema, _ = schema_and_table(table)
  index_name = op[:name] || default_index_name(table, op[:columns])

  sql = "DROP INDEX".dup
  sql << ' CONCURRENTLY' if op[:concurrently]
  sql << ' IF EXISTS' if op[:if_exists]
  sql << ' '
  sql << "#{quote_identifier(schema)}." if schema
  sql << quote_identifier(index_name)
  sql << ' CASCADE' if op[:cascade]
  sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def continue_on_exists_proc; end", "def on_exists_proc; end", "def exists?; end", "def continue_on_exists_proc=(_arg0); end", "def on_exists_proc=(_arg0); end", "def exists?\n self.ensure == :present\n end", "def exists?\n false\n end", "def exists?\n false\n end", "def exists?\n tru...
[ "0.6839439", "0.6661225", "0.6396953", "0.6232628", "0.6209676", "0.60973483", "0.5978841", "0.5978841", "0.5968392", "0.5937667", "0.5933376", "0.59019464", "0.57371604", "0.57335764", "0.5651571", "0.5592412", "0.55716753", "0.5569275", "0.5567538", "0.5565487", "0.5559403"...
0.0
-1
SQL for dropping a procedural language from the database.
# SQL for dropping a procedural language from the database.
#
# opts :: :if_exists :: add IF EXISTS
#         :cascade   :: add CASCADE
def drop_language_sql(name, opts=OPTS)
  existence = opts[:if_exists] ? ' IF EXISTS' : ''
  cascade = opts[:cascade] ? ' CASCADE' : ''
  "DROP LANGUAGE#{existence} #{name}#{cascade}"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_language(name, opts=OPTS)\n self << drop_language_sql(name, opts)\n end", "def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end", "def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end", "def to_dro...
[ "0.74222535", "0.68442863", "0.6644301", "0.6460011", "0.6444636", "0.6371063", "0.6324901", "0.6263596", "0.6263596", "0.6256065", "0.6241722", "0.62375337", "0.620885", "0.6179907", "0.61783373", "0.6157233", "0.6125394", "0.60962826", "0.60942584", "0.607084", "0.60548294"...
0.81428945
0
SQL for dropping a schema from the database.
# SQL for dropping a schema from the database.
#
# opts :: :if_exists :: add IF EXISTS
#         :cascade   :: add CASCADE
def drop_schema_sql(name, opts=OPTS)
  sql = "DROP SCHEMA".dup
  sql << ' IF EXISTS' if opts[:if_exists]
  sql << " #{quote_identifier(name)}"
  sql << ' CASCADE' if opts[:cascade]
  sql
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end", "def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end", "def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end", "def dro...
[ "0.8680619", "0.85794634", "0.85016835", "0.84340376", "0.8426357", "0.829404", "0.82850105", "0.8108252", "0.80193865", "0.8011835", "0.77801085", "0.74915457", "0.7473172", "0.746467", "0.74538666", "0.7438913", "0.7438811", "0.73444384", "0.73317754", "0.7308943", "0.72807...
0.86468154
1